You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by st...@apache.org on 2017/07/06 20:13:31 UTC
[16/53] [abbrv] [partial] hbase git commit: HBASE-17056 Remove
checked in PB generated files Selective add of dependency on hbase-thirdparty
jars. Update to READMEs on how protobuf is done (and update to refguide)
Removed all checked in generated protobu
http://git-wip-us.apache.org/repos/asf/hbase/blob/df93c13f/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/RpcController.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/RpcController.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/RpcController.java
deleted file mode 100644
index 99e306b..0000000
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/RpcController.java
+++ /dev/null
@@ -1,118 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-package org.apache.hadoop.hbase.shaded.com.google.protobuf;
-
-/**
- * <p>An {@code RpcController} mediates a single method call. The primary
- * purpose of the controller is to provide a way to manipulate settings
- * specific to the RPC implementation and to find out about RPC-level errors.
- *
- * <p>Starting with version 2.3.0, RPC implementations should not try to build
- * on this, but should instead provide code generator plugins which generate
- * code specific to the particular RPC implementation. This way the generated
- * code can be more appropriate for the implementation in use and can avoid
- * unnecessary layers of indirection.
- *
- * <p>The methods provided by the {@code RpcController} interface are intended
- * to be a "least common denominator" set of features which we expect all
- * implementations to support. Specific implementations may provide more
- * advanced features (e.g. deadline propagation).
- *
- * @author kenton@google.com Kenton Varda
- */
-public interface RpcController {
- // -----------------------------------------------------------------
- // These calls may be made from the client side only. Their results
- // are undefined on the server side (may throw RuntimeExceptions).
-
- /**
- * Resets the RpcController to its initial state so that it may be reused in
- * a new call. This can be called from the client side only. It must not
- * be called while an RPC is in progress.
- */
- void reset();
-
- /**
- * After a call has finished, returns true if the call failed. The possible
- * reasons for failure depend on the RPC implementation. {@code failed()}
- * most only be called on the client side, and must not be called before a
- * call has finished.
- */
- boolean failed();
-
- /**
- * If {@code failed()} is {@code true}, returns a human-readable description
- * of the error.
- */
- String errorText();
-
- /**
- * Advises the RPC system that the caller desires that the RPC call be
- * canceled. The RPC system may cancel it immediately, may wait awhile and
- * then cancel it, or may not even cancel the call at all. If the call is
- * canceled, the "done" callback will still be called and the RpcController
- * will indicate that the call failed at that time.
- */
- void startCancel();
-
- // -----------------------------------------------------------------
- // These calls may be made from the server side only. Their results
- // are undefined on the client side (may throw RuntimeExceptions).
-
- /**
- * Causes {@code failed()} to return true on the client side. {@code reason}
- * will be incorporated into the message returned by {@code errorText()}.
- * If you find you need to return machine-readable information about
- * failures, you should incorporate it into your response protocol buffer
- * and should NOT call {@code setFailed()}.
- */
- void setFailed(String reason);
-
- /**
- * If {@code true}, indicates that the client canceled the RPC, so the server
- * may as well give up on replying to it. This method must be called on the
- * server side only. The server should still call the final "done" callback.
- */
- boolean isCanceled();
-
- /**
- * Asks that the given callback be called when the RPC is canceled. The
- * parameter passed to the callback will always be {@code null}. The
- * callback will always be called exactly once. If the RPC completes without
- * being canceled, the callback will be called after completion. If the RPC
- * has already been canceled when NotifyOnCancel() is called, the callback
- * will be called immediately.
- *
- * <p>{@code notifyOnCancel()} must be called no more than once per request.
- * It must be called on the server side only.
- */
- void notifyOnCancel(RpcCallback<Object> callback);
-}
http://git-wip-us.apache.org/repos/asf/hbase/blob/df93c13f/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/RpcUtil.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/RpcUtil.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/RpcUtil.java
deleted file mode 100644
index c2a6878..0000000
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/RpcUtil.java
+++ /dev/null
@@ -1,136 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-package org.apache.hadoop.hbase.shaded.com.google.protobuf;
-
-/**
- * Grab-bag of utility functions useful when dealing with RPCs.
- *
- * @author kenton@google.com Kenton Varda
- */
-public final class RpcUtil {
- private RpcUtil() {}
-
- /**
- * Take an {@code RpcCallback<Message>} and convert it to an
- * {@code RpcCallback} accepting a specific message type. This is always
- * type-safe (parameter type contravariance).
- */
- @SuppressWarnings("unchecked")
- public static <Type extends Message> RpcCallback<Type>
- specializeCallback(final RpcCallback<Message> originalCallback) {
- return (RpcCallback<Type>)originalCallback;
- // The above cast works, but only due to technical details of the Java
- // implementation. A more theoretically correct -- but less efficient --
- // implementation would be as follows:
- // return new RpcCallback<Type>() {
- // public void run(Type parameter) {
- // originalCallback.run(parameter);
- // }
- // };
- }
-
- /**
- * Take an {@code RpcCallback} accepting a specific message type and convert
- * it to an {@code RpcCallback<Message>}. The generalized callback will
- * accept any message object which has the same descriptor, and will convert
- * it to the correct class before calling the original callback. However,
- * if the generalized callback is given a message with a different descriptor,
- * an exception will be thrown.
- */
- public static <Type extends Message>
- RpcCallback<Message> generalizeCallback(
- final RpcCallback<Type> originalCallback,
- final Class<Type> originalClass,
- final Type defaultInstance) {
- return new RpcCallback<Message>() {
- @Override
- public void run(final Message parameter) {
- Type typedParameter;
- try {
- typedParameter = originalClass.cast(parameter);
- } catch (ClassCastException ignored) {
- typedParameter = copyAsType(defaultInstance, parameter);
- }
- originalCallback.run(typedParameter);
- }
- };
- }
-
- /**
- * Creates a new message of type "Type" which is a copy of "source". "source"
- * must have the same descriptor but may be a different class (e.g.
- * DynamicMessage).
- */
- @SuppressWarnings("unchecked")
- private static <Type extends Message> Type copyAsType(
- final Type typeDefaultInstance, final Message source) {
- return (Type) typeDefaultInstance
- .newBuilderForType().mergeFrom(source).build();
- }
-
- /**
- * Creates a callback which can only be called once. This may be useful for
- * security, when passing a callback to untrusted code: most callbacks do
- * not expect to be called more than once, so doing so may expose bugs if it
- * is not prevented.
- */
- public static <ParameterType>
- RpcCallback<ParameterType> newOneTimeCallback(
- final RpcCallback<ParameterType> originalCallback) {
- return new RpcCallback<ParameterType>() {
- private boolean alreadyCalled = false;
-
- @Override
- public void run(final ParameterType parameter) {
- synchronized (this) {
- if (alreadyCalled) {
- throw new AlreadyCalledException();
- }
- alreadyCalled = true;
- }
-
- originalCallback.run(parameter);
- }
- };
- }
-
- /**
- * Exception thrown when a one-time callback is called more than once.
- */
- public static final class AlreadyCalledException extends RuntimeException {
- private static final long serialVersionUID = 5469741279507848266L;
-
- public AlreadyCalledException() {
- super("This RpcCallback was already called and cannot be called " +
- "multiple times.");
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/hbase/blob/df93c13f/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Service.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Service.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Service.java
deleted file mode 100644
index 849d9ff..0000000
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Service.java
+++ /dev/null
@@ -1,117 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-package org.apache.hadoop.hbase.shaded.com.google.protobuf;
-
-/**
- * Abstract base interface for protocol-buffer-based RPC services. Services
- * themselves are abstract classes (implemented either by servers or as
- * stubs), but they subclass this base interface. The methods of this
- * interface can be used to call the methods of the service without knowing
- * its exact type at compile time (analogous to the Message interface).
- *
- * <p>Starting with version 2.3.0, RPC implementations should not try to build
- * on this, but should instead provide code generator plugins which generate
- * code specific to the particular RPC implementation. This way the generated
- * code can be more appropriate for the implementation in use and can avoid
- * unnecessary layers of indirection.
- *
- * @author kenton@google.com Kenton Varda
- */
-public interface Service {
- /**
- * Get the {@code ServiceDescriptor} describing this service and its methods.
- */
- Descriptors.ServiceDescriptor getDescriptorForType();
-
- /**
- * <p>Call a method of the service specified by MethodDescriptor. This is
- * normally implemented as a simple {@code switch()} that calls the standard
- * definitions of the service's methods.
- *
- * <p>Preconditions:
- * <ul>
- * <li>{@code method.getService() == getDescriptorForType()}
- * <li>{@code request} is of the exact same class as the object returned by
- * {@code getRequestPrototype(method)}.
- * <li>{@code controller} is of the correct type for the RPC implementation
- * being used by this Service. For stubs, the "correct type" depends
- * on the RpcChannel which the stub is using. Server-side Service
- * implementations are expected to accept whatever type of
- * {@code RpcController} the server-side RPC implementation uses.
- * </ul>
- *
- * <p>Postconditions:
- * <ul>
- * <li>{@code done} will be called when the method is complete. This may be
- * before {@code callMethod()} returns or it may be at some point in
- * the future.
- * <li>The parameter to {@code done} is the response. It must be of the
- * exact same type as would be returned by
- * {@code getResponsePrototype(method)}.
- * <li>If the RPC failed, the parameter to {@code done} will be
- * {@code null}. Further details about the failure can be found by
- * querying {@code controller}.
- * </ul>
- */
- void callMethod(Descriptors.MethodDescriptor method,
- RpcController controller,
- Message request,
- RpcCallback<Message> done);
-
- /**
- * <p>{@code callMethod()} requires that the request passed in is of a
- * particular subclass of {@code Message}. {@code getRequestPrototype()}
- * gets the default instances of this type for a given method. You can then
- * call {@code Message.newBuilderForType()} on this instance to
- * construct a builder to build an object which you can then pass to
- * {@code callMethod()}.
- *
- * <p>Example:
- * <pre>
- * MethodDescriptor method =
- * service.getDescriptorForType().findMethodByName("Foo");
- * Message request =
- * stub.getRequestPrototype(method).newBuilderForType()
- * .mergeFrom(input).build();
- * service.callMethod(method, request, callback);
- * </pre>
- */
- Message getRequestPrototype(Descriptors.MethodDescriptor method);
-
- /**
- * Like {@code getRequestPrototype()}, but gets a prototype of the response
- * message. {@code getResponsePrototype()} is generally not needed because
- * the {@code Service} implementation constructs the response message itself,
- * but it may be useful in some cases to know ahead of time what type of
- * object will be returned.
- */
- Message getResponsePrototype(Descriptors.MethodDescriptor method);
-}
http://git-wip-us.apache.org/repos/asf/hbase/blob/df93c13f/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ServiceException.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ServiceException.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ServiceException.java
deleted file mode 100644
index 9b04a12..0000000
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ServiceException.java
+++ /dev/null
@@ -1,52 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-package org.apache.hadoop.hbase.shaded.com.google.protobuf;
-
-/**
- * Thrown by blocking RPC methods when a failure occurs.
- *
- * @author cpovirk@google.com (Chris Povirk)
- */
-public class ServiceException extends Exception {
- private static final long serialVersionUID = -1219262335729891920L;
-
- public ServiceException(final String message) {
- super(message);
- }
-
- public ServiceException(final Throwable cause) {
- super(cause);
- }
-
- public ServiceException(final String message, final Throwable cause) {
- super(message, cause);
- }
-}
http://git-wip-us.apache.org/repos/asf/hbase/blob/df93c13f/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/SingleFieldBuilder.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/SingleFieldBuilder.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/SingleFieldBuilder.java
deleted file mode 100644
index 818901a..0000000
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/SingleFieldBuilder.java
+++ /dev/null
@@ -1,241 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-package org.apache.hadoop.hbase.shaded.com.google.protobuf;
-
-/**
- * {@code SingleFieldBuilder} implements a structure that a protocol
- * message uses to hold a single field of another protocol message. It supports
- * the classical use case of setting an immutable {@link Message} as the value
- * of the field and is highly optimized around this.
- * <br>
- * It also supports the additional use case of setting a {@link Message.Builder}
- * as the field and deferring conversion of that {@code Builder}
- * to an immutable {@code Message}. In this way, it's possible to maintain
- * a tree of {@code Builder}'s that acts as a fully read/write data
- * structure.
- * <br>
- * Logically, one can think of a tree of builders as converting the entire tree
- * to messages when build is called on the root or when any method is called
- * that desires a Message instead of a Builder. In terms of the implementation,
- * the {@code SingleFieldBuilder} and {@code RepeatedFieldBuilder}
- * classes cache messages that were created so that messages only need to be
- * created when some change occurred in its builder or a builder for one of its
- * descendants.
- *
- * @param <MType> the type of message for the field
- * @param <BType> the type of builder for the field
- * @param <IType> the common interface for the message and the builder
- *
- * @author jonp@google.com (Jon Perlow)
- */
-public class SingleFieldBuilder
- <MType extends GeneratedMessage,
- BType extends GeneratedMessage.Builder,
- IType extends MessageOrBuilder>
- implements GeneratedMessage.BuilderParent {
-
- // Parent to send changes to.
- private GeneratedMessage.BuilderParent parent;
-
- // Invariant: one of builder or message fields must be non-null.
-
- // If set, this is the case where we are backed by a builder. In this case,
- // message field represents a cached message for the builder (or null if
- // there is no cached message).
- private BType builder;
-
- // If builder is non-null, this represents a cached message from the builder.
- // If builder is null, this is the authoritative message for the field.
- private MType message;
-
- // Indicates that we've built a message and so we are now obligated
- // to dispatch dirty invalidations. See GeneratedMessage.BuilderListener.
- private boolean isClean;
-
- public SingleFieldBuilder(
- MType message,
- GeneratedMessage.BuilderParent parent,
- boolean isClean) {
- if (message == null) {
- throw new NullPointerException();
- }
- this.message = message;
- this.parent = parent;
- this.isClean = isClean;
- }
-
- public void dispose() {
- // Null out parent so we stop sending it invalidations.
- parent = null;
- }
-
- /**
- * Get the message for the field. If the message is currently stored
- * as a {@code Builder}, it is converted to a {@code Message} by
- * calling {@link Message.Builder#buildPartial} on it. If no message has
- * been set, returns the default instance of the message.
- *
- * @return the message for the field
- */
- @SuppressWarnings("unchecked")
- public MType getMessage() {
- if (message == null) {
- // If message is null, the invariant is that we must be have a builder.
- message = (MType) builder.buildPartial();
- }
- return message;
- }
-
- /**
- * Builds the message and returns it.
- *
- * @return the message
- */
- public MType build() {
- // Now that build has been called, we are required to dispatch
- // invalidations.
- isClean = true;
- return getMessage();
- }
-
- /**
- * Gets a builder for the field. If no builder has been created yet, a
- * builder is created on demand by calling {@link Message#toBuilder}.
- *
- * @return The builder for the field
- */
- @SuppressWarnings("unchecked")
- public BType getBuilder() {
- if (builder == null) {
- // builder.mergeFrom() on a fresh builder
- // does not create any sub-objects with independent clean/dirty states,
- // therefore setting the builder itself to clean without actually calling
- // build() cannot break any invariants.
- builder = (BType) message.newBuilderForType(this);
- builder.mergeFrom(message); // no-op if message is the default message
- builder.markClean();
- }
- return builder;
- }
-
- /**
- * Gets the base class interface for the field. This may either be a builder
- * or a message. It will return whatever is more efficient.
- *
- * @return the message or builder for the field as the base class interface
- */
- @SuppressWarnings("unchecked")
- public IType getMessageOrBuilder() {
- if (builder != null) {
- return (IType) builder;
- } else {
- return (IType) message;
- }
- }
-
- /**
- * Sets a message for the field replacing any existing value.
- *
- * @param message the message to set
- * @return the builder
- */
- public SingleFieldBuilder<MType, BType, IType> setMessage(
- MType message) {
- if (message == null) {
- throw new NullPointerException();
- }
- this.message = message;
- if (builder != null) {
- builder.dispose();
- builder = null;
- }
- onChanged();
- return this;
- }
-
- /**
- * Merges the field from another field.
- *
- * @param value the value to merge from
- * @return the builder
- */
- public SingleFieldBuilder<MType, BType, IType> mergeFrom(
- MType value) {
- if (builder == null && message == message.getDefaultInstanceForType()) {
- message = value;
- } else {
- getBuilder().mergeFrom(value);
- }
- onChanged();
- return this;
- }
-
- /**
- * Clears the value of the field.
- *
- * @return the builder
- */
- @SuppressWarnings("unchecked")
- public SingleFieldBuilder<MType, BType, IType> clear() {
- message = (MType) (message != null ?
- message.getDefaultInstanceForType() :
- builder.getDefaultInstanceForType());
- if (builder != null) {
- builder.dispose();
- builder = null;
- }
- onChanged();
- return this;
- }
-
- /**
- * Called when a the builder or one of its nested children has changed
- * and any parent should be notified of its invalidation.
- */
- private void onChanged() {
- // If builder is null, this is the case where onChanged is being called
- // from setMessage or clear.
- if (builder != null) {
- message = null;
- }
- if (isClean && parent != null) {
- parent.markDirty();
-
- // Don't keep dispatching invalidations until build is called again.
- isClean = false;
- }
- }
-
- @Override
- public void markDirty() {
- onChanged();
- }
-}
http://git-wip-us.apache.org/repos/asf/hbase/blob/df93c13f/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/SingleFieldBuilderV3.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/SingleFieldBuilderV3.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/SingleFieldBuilderV3.java
deleted file mode 100644
index 0a294dc..0000000
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/SingleFieldBuilderV3.java
+++ /dev/null
@@ -1,241 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-package org.apache.hadoop.hbase.shaded.com.google.protobuf;
-
-/**
- * {@code SingleFieldBuilderV3} implements a structure that a protocol
- * message uses to hold a single field of another protocol message. It supports
- * the classical use case of setting an immutable {@link Message} as the value
- * of the field and is highly optimized around this.
- * <br>
- * It also supports the additional use case of setting a {@link Message.Builder}
- * as the field and deferring conversion of that {@code Builder}
- * to an immutable {@code Message}. In this way, it's possible to maintain
- * a tree of {@code Builder}'s that acts as a fully read/write data
- * structure.
- * <br>
- * Logically, one can think of a tree of builders as converting the entire tree
- * to messages when build is called on the root or when any method is called
- * that desires a Message instead of a Builder. In terms of the implementation,
- * the {@code SingleFieldBuilderV3} and {@code RepeatedFieldBuilderV3}
- * classes cache messages that were created so that messages only need to be
- * created when some change occurred in its builder or a builder for one of its
- * descendants.
- *
- * @param <MType> the type of message for the field
- * @param <BType> the type of builder for the field
- * @param <IType> the common interface for the message and the builder
- *
- * @author jonp@google.com (Jon Perlow)
- */
-public class SingleFieldBuilderV3
- <MType extends AbstractMessage,
- BType extends AbstractMessage.Builder,
- IType extends MessageOrBuilder>
- implements AbstractMessage.BuilderParent {
-
- // Parent to send changes to.
- private AbstractMessage.BuilderParent parent;
-
- // Invariant: one of builder or message fields must be non-null.
-
- // If set, this is the case where we are backed by a builder. In this case,
- // message field represents a cached message for the builder (or null if
- // there is no cached message).
- private BType builder;
-
- // If builder is non-null, this represents a cached message from the builder.
- // If builder is null, this is the authoritative message for the field.
- private MType message;
-
- // Indicates that we've built a message and so we are now obligated
- // to dispatch dirty invalidations. See AbstractMessage.BuilderListener.
- private boolean isClean;
-
- public SingleFieldBuilderV3(
- MType message,
- AbstractMessage.BuilderParent parent,
- boolean isClean) {
- if (message == null) {
- throw new NullPointerException();
- }
- this.message = message;
- this.parent = parent;
- this.isClean = isClean;
- }
-
- public void dispose() {
- // Null out parent so we stop sending it invalidations.
- parent = null;
- }
-
- /**
- * Get the message for the field. If the message is currently stored
- * as a {@code Builder}, it is converted to a {@code Message} by
- * calling {@link Message.Builder#buildPartial} on it. If no message has
- * been set, returns the default instance of the message.
- *
- * @return the message for the field
- */
- @SuppressWarnings("unchecked")
- public MType getMessage() {
- if (message == null) {
- // If message is null, the invariant is that we must have a builder.
- message = (MType) builder.buildPartial();
- }
- return message;
- }
-
- /**
- * Builds the message and returns it.
- *
- * @return the message
- */
- public MType build() {
- // Now that build has been called, we are required to dispatch
- // invalidations.
- isClean = true;
- return getMessage();
- }
-
- /**
- * Gets a builder for the field. If no builder has been created yet, a
- * builder is created on demand by calling {@link Message#toBuilder}.
- *
- * @return The builder for the field
- */
- @SuppressWarnings("unchecked")
- public BType getBuilder() {
- if (builder == null) {
- // builder.mergeFrom() on a fresh builder
- // does not create any sub-objects with independent clean/dirty states,
- // therefore setting the builder itself to clean without actually calling
- // build() cannot break any invariants.
- builder = (BType) message.newBuilderForType(this);
- builder.mergeFrom(message); // no-op if message is the default message
- builder.markClean();
- }
- return builder;
- }
-
- /**
- * Gets the base class interface for the field. This may either be a builder
- * or a message. It will return whatever is more efficient.
- *
- * @return the message or builder for the field as the base class interface
- */
- @SuppressWarnings("unchecked")
- public IType getMessageOrBuilder() {
- if (builder != null) {
- return (IType) builder;
- } else {
- return (IType) message;
- }
- }
-
- /**
- * Sets a message for the field replacing any existing value.
- *
- * @param message the message to set
- * @return the builder
- */
- public SingleFieldBuilderV3<MType, BType, IType> setMessage(
- MType message) {
- if (message == null) {
- throw new NullPointerException();
- }
- this.message = message;
- if (builder != null) {
- builder.dispose();
- builder = null;
- }
- onChanged();
- return this;
- }
-
- /**
- * Merges the value from another field into this field.
- *
- * @param value the value to merge from
- * @return the builder
- */
- public SingleFieldBuilderV3<MType, BType, IType> mergeFrom(
- MType value) {
- if (builder == null && message == message.getDefaultInstanceForType()) {
- message = value;
- } else {
- getBuilder().mergeFrom(value);
- }
- onChanged();
- return this;
- }
-
- /**
- * Clears the value of the field.
- *
- * @return the builder
- */
- @SuppressWarnings("unchecked")
- public SingleFieldBuilderV3<MType, BType, IType> clear() {
- message = (MType) (message != null ?
- message.getDefaultInstanceForType() :
- builder.getDefaultInstanceForType());
- if (builder != null) {
- builder.dispose();
- builder = null;
- }
- onChanged();
- return this;
- }
-
- /**
- * Called when the builder or one of its nested children has changed
- * and any parent should be notified of its invalidation.
- */
- private void onChanged() {
- // If builder is null, this is the case where onChanged is being called
- // from setMessage or clear.
- if (builder != null) {
- message = null;
- }
- if (isClean && parent != null) {
- parent.markDirty();
-
- // Don't keep dispatching invalidations until build is called again.
- isClean = false;
- }
- }
-
- @Override
- public void markDirty() {
- onChanged();
- }
-}
http://git-wip-us.apache.org/repos/asf/hbase/blob/df93c13f/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/SmallSortedMap.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/SmallSortedMap.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/SmallSortedMap.java
deleted file mode 100644
index a24c2ce..0000000
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/SmallSortedMap.java
+++ /dev/null
@@ -1,673 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-package org.apache.hadoop.hbase.shaded.com.google.protobuf;
-
-import java.util.AbstractMap;
-import java.util.AbstractSet;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.NoSuchElementException;
-import java.util.Set;
-import java.util.SortedMap;
-import java.util.TreeMap;
-
-/**
- * A custom map implementation from FieldDescriptor to Object optimized to
- * minimize the number of memory allocations for instances with a small number
- * of mappings. The implementation stores the first {@code k} mappings in an
- * array for a configurable value of {@code k}, allowing direct access to the
- * corresponding {@code Entry}s without the need to create an Iterator. The
- * remaining entries are stored in an overflow map. Iteration over the entries
- * in the map should be done as follows:
- *
- * <pre> {@code
- * for (int i = 0; i < fieldMap.getNumArrayEntries(); i++) {
- * process(fieldMap.getArrayEntryAt(i));
- * }
- * for (Map.Entry<K, V> entry : fieldMap.getOverflowEntries()) {
- * process(entry);
- * }
- * }</pre>
- *
- * The resulting iteration is in order of ascending field tag number. The
- * object returned by {@link #entrySet()} adheres to the same contract but is
- * less efficient as it necessarily involves creating an object for iteration.
- * <p>
- * The tradeoff for this memory efficiency is that the worst case running time
- * of the {@code put()} operation is {@code O(k + lg n)}, which happens when
- * entries are added in descending order. {@code k} should be chosen such that
- * it covers enough common cases without adversely affecting larger maps. In
- * practice, the worst case scenario does not happen for extensions because
- * extension fields are serialized and deserialized in order of ascending tag
- * number, but the worst case scenario can happen for DynamicMessages.
- * <p>
- * The running time for all other operations is similar to that of
- * {@code TreeMap}.
- * <p>
- * Instances are not thread-safe until {@link #makeImmutable()} is called,
- * after which any modifying operation will result in an
- * {@link UnsupportedOperationException}.
- *
- * @author darick@google.com Darick Tong
- */
-// This class is final for all intents and purposes because the constructor is
-// private. However, the FieldDescriptor-specific logic is encapsulated in
-// a subclass to aid testability of the core logic.
-class SmallSortedMap<K extends Comparable<K>, V> extends AbstractMap<K, V> {
-
- /**
- * Creates a new instance for mapping FieldDescriptors to their values.
- * The {@link #makeImmutable()} implementation will convert the List values
- * of any repeated fields to unmodifiable lists.
- *
- * @param arraySize The size of the entry array containing the
- * lexicographically smallest mappings.
- */
- static <FieldDescriptorType extends
- FieldSet.FieldDescriptorLite<FieldDescriptorType>>
- SmallSortedMap<FieldDescriptorType, Object> newFieldMap(int arraySize) {
- return new SmallSortedMap<FieldDescriptorType, Object>(arraySize) {
- @Override
- @SuppressWarnings("unchecked")
- public void makeImmutable() {
- if (!isImmutable()) {
- for (int i = 0; i < getNumArrayEntries(); i++) {
- final Map.Entry<FieldDescriptorType, Object> entry =
- getArrayEntryAt(i);
- if (entry.getKey().isRepeated()) {
- final List value = (List) entry.getValue();
- entry.setValue(Collections.unmodifiableList(value));
- }
- }
- for (Map.Entry<FieldDescriptorType, Object> entry :
- getOverflowEntries()) {
- if (entry.getKey().isRepeated()) {
- final List value = (List) entry.getValue();
- entry.setValue(Collections.unmodifiableList(value));
- }
- }
- }
- super.makeImmutable();
- }
- };
- }
-
- /**
- * Creates a new instance for testing.
- *
- * @param arraySize The size of the entry array containing the
- * lexicographically smallest mappings.
- */
- static <K extends Comparable<K>, V> SmallSortedMap<K, V> newInstanceForTest(
- int arraySize) {
- return new SmallSortedMap<K, V>(arraySize);
- }
-
- private final int maxArraySize;
- // The "entry array" is actually a List because generic arrays are not
- // allowed. ArrayList also nicely handles the entry shifting on inserts and
- // removes.
- private List<Entry> entryList;
- private Map<K, V> overflowEntries;
- private boolean isImmutable;
- // The EntrySet is a stateless view of the Map. It's initialized the first
- // time it is requested and reused henceforth.
- private volatile EntrySet lazyEntrySet;
-
- /**
- * @param arraySize Size of the array in which the lexicographically smallest
- * mappings are stored. (i.e. the {@code k} referred to in the class
- * documentation).
- */
- private SmallSortedMap(int arraySize) {
- this.maxArraySize = arraySize;
- this.entryList = Collections.emptyList();
- this.overflowEntries = Collections.emptyMap();
- }
-
- /** Make this map immutable from this point forward. */
- public void makeImmutable() {
- if (!isImmutable) {
- // Note: There's no need to wrap the entryList in an unmodifiableList
- // because none of the list's accessors are exposed. The iterator() of
- // overflowEntries, on the other hand, is exposed so it must be made
- // unmodifiable.
- overflowEntries = overflowEntries.isEmpty() ?
- Collections.<K, V>emptyMap() :
- Collections.unmodifiableMap(overflowEntries);
- isImmutable = true;
- }
- }
-
- /** @return Whether {@link #makeImmutable()} has been called. */
- public boolean isImmutable() {
- return isImmutable;
- }
-
- /** @return The number of entries in the entry array. */
- public int getNumArrayEntries() {
- return entryList.size();
- }
-
- /** @return The array entry at the given {@code index}. */
- public Map.Entry<K, V> getArrayEntryAt(int index) {
- return entryList.get(index);
- }
-
- /** @return The number of overflow entries. */
- public int getNumOverflowEntries() {
- return overflowEntries.size();
- }
-
- /** @return An iterable over the overflow entries. */
- public Iterable<Map.Entry<K, V>> getOverflowEntries() {
- return overflowEntries.isEmpty() ?
- EmptySet.<Map.Entry<K, V>>iterable() :
- overflowEntries.entrySet();
- }
-
-
- @Override
- public int size() {
- return entryList.size() + overflowEntries.size();
- }
-
- /**
- * The implementation throws a {@code ClassCastException} if o is not an
- * object of type {@code K}.
- *
- * {@inheritDoc}
- */
- @Override
- public boolean containsKey(Object o) {
- @SuppressWarnings("unchecked")
- final K key = (K) o;
- return binarySearchInArray(key) >= 0 || overflowEntries.containsKey(key);
- }
-
- /**
- * The implementation throws a {@code ClassCastException} if o is not an
- * object of type {@code K}.
- *
- * {@inheritDoc}
- */
- @Override
- public V get(Object o) {
- @SuppressWarnings("unchecked")
- final K key = (K) o;
- final int index = binarySearchInArray(key);
- if (index >= 0) {
- return entryList.get(index).getValue();
- }
- return overflowEntries.get(key);
- }
-
- @Override
- public V put(K key, V value) {
- checkMutable();
- final int index = binarySearchInArray(key);
- if (index >= 0) {
- // Replace existing array entry.
- return entryList.get(index).setValue(value);
- }
- ensureEntryArrayMutable();
- final int insertionPoint = -(index + 1);
- if (insertionPoint >= maxArraySize) {
- // Put directly in overflow.
- return getOverflowEntriesMutable().put(key, value);
- }
- // Insert new Entry in array.
- if (entryList.size() == maxArraySize) {
- // Shift the last array entry into overflow.
- final Entry lastEntryInArray = entryList.remove(maxArraySize - 1);
- getOverflowEntriesMutable().put(lastEntryInArray.getKey(),
- lastEntryInArray.getValue());
- }
- entryList.add(insertionPoint, new Entry(key, value));
- return null;
- }
-
- @Override
- public void clear() {
- checkMutable();
- if (!entryList.isEmpty()) {
- entryList.clear();
- }
- if (!overflowEntries.isEmpty()) {
- overflowEntries.clear();
- }
- }
-
- /**
- * The implementation throws a {@code ClassCastException} if o is not an
- * object of type {@code K}.
- *
- * {@inheritDoc}
- */
- @Override
- public V remove(Object o) {
- checkMutable();
- @SuppressWarnings("unchecked")
- final K key = (K) o;
- final int index = binarySearchInArray(key);
- if (index >= 0) {
- return removeArrayEntryAt(index);
- }
- // overflowEntries might be Collections.unmodifiableMap(), so only
- // call remove() if it is non-empty.
- if (overflowEntries.isEmpty()) {
- return null;
- } else {
- return overflowEntries.remove(key);
- }
- }
-
- private V removeArrayEntryAt(int index) {
- checkMutable();
- final V removed = entryList.remove(index).getValue();
- if (!overflowEntries.isEmpty()) {
- // Shift the first entry in the overflow to be the last entry in the
- // array.
- final Iterator<Map.Entry<K, V>> iterator =
- getOverflowEntriesMutable().entrySet().iterator();
- entryList.add(new Entry(iterator.next()));
- iterator.remove();
- }
- return removed;
- }
-
- /**
- * @param key The key to find in the entry array.
- * @return The returned integer position follows the same semantics as the
- * value returned by {@link java.util.Arrays#binarySearch()}.
- */
- private int binarySearchInArray(K key) {
- int left = 0;
- int right = entryList.size() - 1;
-
- // Optimization: For the common case in which entries are added in
- // ascending tag order, check the largest element in the array before
- // doing a full binary search.
- if (right >= 0) {
- int cmp = key.compareTo(entryList.get(right).getKey());
- if (cmp > 0) {
- return -(right + 2); // Insert point is after "right".
- } else if (cmp == 0) {
- return right;
- }
- }
-
- while (left <= right) {
- int mid = (left + right) / 2;
- int cmp = key.compareTo(entryList.get(mid).getKey());
- if (cmp < 0) {
- right = mid - 1;
- } else if (cmp > 0) {
- left = mid + 1;
- } else {
- return mid;
- }
- }
- return -(left + 1);
- }
-
- /**
- * Similar to the AbstractMap implementation of {@code keySet()} and
- * {@code values()}, the entry set is created the first time this method is
- * called, and returned in response to all subsequent calls.
- *
- * {@inheritDoc}
- */
- @Override
- public Set<Map.Entry<K, V>> entrySet() {
- if (lazyEntrySet == null) {
- lazyEntrySet = new EntrySet();
- }
- return lazyEntrySet;
- }
-
-
- /**
- * @throws UnsupportedOperationException if {@link #makeImmutable()}
- * has been called.
- */
- private void checkMutable() {
- if (isImmutable) {
- throw new UnsupportedOperationException();
- }
- }
-
- /**
- * @return a {@link SortedMap} to which overflow entries mappings can be
- * added or removed.
- * @throws UnsupportedOperationException if {@link #makeImmutable()} has been
- * called.
- */
- @SuppressWarnings("unchecked")
- private SortedMap<K, V> getOverflowEntriesMutable() {
- checkMutable();
- if (overflowEntries.isEmpty() && !(overflowEntries instanceof TreeMap)) {
- overflowEntries = new TreeMap<K, V>();
- }
- return (SortedMap<K, V>) overflowEntries;
- }
-
- /**
- * Lazily creates the entry list. Any code that adds to the list must first
- * call this method.
- */
- private void ensureEntryArrayMutable() {
- checkMutable();
- if (entryList.isEmpty() && !(entryList instanceof ArrayList)) {
- entryList = new ArrayList<Entry>(maxArraySize);
- }
- }
-
- /**
- * Entry implementation that implements Comparable in order to support
- * binary search within the entry array. Also checks mutability in
- * {@link #setValue()}.
- */
- private class Entry implements Map.Entry<K, V>, Comparable<Entry> {
-
- private final K key;
- private V value;
-
- Entry(Map.Entry<K, V> copy) {
- this(copy.getKey(), copy.getValue());
- }
-
- Entry(K key, V value) {
- this.key = key;
- this.value = value;
- }
-
- @Override
- public K getKey() {
- return key;
- }
-
- @Override
- public V getValue() {
- return value;
- }
-
- @Override
- public int compareTo(Entry other) {
- return getKey().compareTo(other.getKey());
- }
-
- @Override
- public V setValue(V newValue) {
- checkMutable();
- final V oldValue = this.value;
- this.value = newValue;
- return oldValue;
- }
-
- @Override
- public boolean equals(Object o) {
- if (o == this) {
- return true;
- }
- if (!(o instanceof Map.Entry)) {
- return false;
- }
- @SuppressWarnings("unchecked")
- Map.Entry<?, ?> other = (Map.Entry<?, ?>) o;
- return equals(key, other.getKey()) && equals(value, other.getValue());
- }
-
- @Override
- public int hashCode() {
- return (key == null ? 0 : key.hashCode()) ^
- (value == null ? 0 : value.hashCode());
- }
-
- @Override
- public String toString() {
- return key + "=" + value;
- }
-
- /** equals() that handles null values. */
- private boolean equals(Object o1, Object o2) {
- return o1 == null ? o2 == null : o1.equals(o2);
- }
- }
-
- /**
- * Stateless view of the entries in the field map.
- */
- private class EntrySet extends AbstractSet<Map.Entry<K, V>> {
-
- @Override
- public Iterator<Map.Entry<K, V>> iterator() {
- return new EntryIterator();
- }
-
- @Override
- public int size() {
- return SmallSortedMap.this.size();
- }
-
- /**
- * Throws a {@link ClassCastException} if o is not of the expected type.
- *
- * {@inheritDoc}
- */
- @Override
- public boolean contains(Object o) {
- @SuppressWarnings("unchecked")
- final Map.Entry<K, V> entry = (Map.Entry<K, V>) o;
- final V existing = get(entry.getKey());
- final V value = entry.getValue();
- return existing == value ||
- (existing != null && existing.equals(value));
- }
-
- @Override
- public boolean add(Map.Entry<K, V> entry) {
- if (!contains(entry)) {
- put(entry.getKey(), entry.getValue());
- return true;
- }
- return false;
- }
-
- /**
- * Throws a {@link ClassCastException} if o is not of the expected type.
- *
- * {@inheritDoc}
- */
- @Override
- public boolean remove(Object o) {
- @SuppressWarnings("unchecked")
- final Map.Entry<K, V> entry = (Map.Entry<K, V>) o;
- if (contains(entry)) {
- SmallSortedMap.this.remove(entry.getKey());
- return true;
- }
- return false;
- }
-
- @Override
- public void clear() {
- SmallSortedMap.this.clear();
- }
- }
-
-
- /**
- * Iterator implementation that switches from the entry array to the overflow
- * entries appropriately.
- */
- private class EntryIterator implements Iterator<Map.Entry<K, V>> {
-
- private int pos = -1;
- private boolean nextCalledBeforeRemove;
- private Iterator<Map.Entry<K, V>> lazyOverflowIterator;
-
- @Override
- public boolean hasNext() {
- return (pos + 1) < entryList.size() ||
- getOverflowIterator().hasNext();
- }
-
- @Override
- public Map.Entry<K, V> next() {
- nextCalledBeforeRemove = true;
- // Always increment pos so that we know whether the last returned value
- // was from the array or from overflow.
- if (++pos < entryList.size()) {
- return entryList.get(pos);
- }
- return getOverflowIterator().next();
- }
-
- @Override
- public void remove() {
- if (!nextCalledBeforeRemove) {
- throw new IllegalStateException("remove() was called before next()");
- }
- nextCalledBeforeRemove = false;
- checkMutable();
-
- if (pos < entryList.size()) {
- removeArrayEntryAt(pos--);
- } else {
- getOverflowIterator().remove();
- }
- }
-
- /**
- * It is important to create the overflow iterator only after the array
- * entries have been iterated over because the overflow entry set changes
- * when the client calls remove() on the array entries, which invalidates
- * any existing iterators.
- */
- private Iterator<Map.Entry<K, V>> getOverflowIterator() {
- if (lazyOverflowIterator == null) {
- lazyOverflowIterator = overflowEntries.entrySet().iterator();
- }
- return lazyOverflowIterator;
- }
- }
-
- /**
- * Helper class that holds immutable instances of an Iterable/Iterator that
- * we return when the overflow entries is empty. This eliminates the creation
- * of an Iterator object when there is nothing to iterate over.
- */
- private static class EmptySet {
-
- private static final Iterator<Object> ITERATOR =
- new Iterator<Object>() {
- @Override
- public boolean hasNext() {
- return false;
- }
- @Override
- public Object next() {
- throw new NoSuchElementException();
- }
- @Override
- public void remove() {
- throw new UnsupportedOperationException();
- }
- };
-
- private static final Iterable<Object> ITERABLE =
- new Iterable<Object>() {
- @Override
- public Iterator<Object> iterator() {
- return ITERATOR;
- }
- };
-
- @SuppressWarnings("unchecked")
- static <T> Iterable<T> iterable() {
- return (Iterable<T>) ITERABLE;
- }
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) {
- return true;
- }
-
- if (!(o instanceof SmallSortedMap)) {
- return super.equals(o);
- }
-
- SmallSortedMap<?, ?> other = (SmallSortedMap<?, ?>) o;
- final int size = size();
- if (size != other.size()) {
- return false;
- }
-
- // Best effort try to avoid allocating an entry set.
- final int numArrayEntries = getNumArrayEntries();
- if (numArrayEntries != other.getNumArrayEntries()) {
- return entrySet().equals(other.entrySet());
- }
-
- for (int i = 0; i < numArrayEntries; i++) {
- if (!getArrayEntryAt(i).equals(other.getArrayEntryAt(i))) {
- return false;
- }
- }
-
- if (numArrayEntries != size) {
- return overflowEntries.equals(other.overflowEntries);
- }
-
-
- return true;
- }
-
- @Override
- public int hashCode() {
- int h = 0;
- final int listSize = getNumArrayEntries();
- for (int i = 0; i < listSize; i++) {
- h += entryList.get(i).hashCode();
- }
- // Avoid the iterator allocation if possible.
- if (getNumOverflowEntries() > 0) {
- h += overflowEntries.hashCode();
- }
- return h;
- }
-}
http://git-wip-us.apache.org/repos/asf/hbase/blob/df93c13f/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/SourceContext.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/SourceContext.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/SourceContext.java
deleted file mode 100644
index ab4938c..0000000
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/SourceContext.java
+++ /dev/null
@@ -1,539 +0,0 @@
-// Generated by the protocol buffer compiler. DO NOT EDIT!
-// source: google/protobuf/source_context.proto
-
-package org.apache.hadoop.hbase.shaded.com.google.protobuf;
-
-/**
- * <pre>
- * `SourceContext` represents information about the source of a
- * protobuf element, like the file in which it is defined.
- * </pre>
- *
- * Protobuf type {@code google.protobuf.SourceContext}
- */
-public final class SourceContext extends
- org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
- // @@protoc_insertion_point(message_implements:google.protobuf.SourceContext)
- SourceContextOrBuilder {
- // Use SourceContext.newBuilder() to construct.
- private SourceContext(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
- super(builder);
- }
- private SourceContext() {
- fileName_ = "";
- }
-
- @java.lang.Override
- public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
- getUnknownFields() {
- return org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance();
- }
- private SourceContext(
- org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
- org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
- this();
- int mutable_bitField0_ = 0;
- try {
- boolean done = false;
- while (!done) {
- int tag = input.readTag();
- switch (tag) {
- case 0:
- done = true;
- break;
- default: {
- if (!input.skipField(tag)) {
- done = true;
- }
- break;
- }
- case 10: {
- java.lang.String s = input.readStringRequireUtf8();
-
- fileName_ = s;
- break;
- }
- }
- }
- } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
- throw e.setUnfinishedMessage(this);
- } catch (java.io.IOException e) {
- throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
- e).setUnfinishedMessage(this);
- } finally {
- makeExtensionsImmutable();
- }
- }
- public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContextProto.internal_static_google_protobuf_SourceContext_descriptor;
- }
-
- protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContextProto.internal_static_google_protobuf_SourceContext_fieldAccessorTable
- .ensureFieldAccessorsInitialized(
- org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext.Builder.class);
- }
-
- public static final int FILE_NAME_FIELD_NUMBER = 1;
- private volatile java.lang.Object fileName_;
- /**
- * <pre>
- * The path-qualified name of the .proto file that contained the associated
- * protobuf element. For example: `"google/protobuf/source_context.proto"`.
- * </pre>
- *
- * <code>string file_name = 1;</code>
- */
- public java.lang.String getFileName() {
- java.lang.Object ref = fileName_;
- if (ref instanceof java.lang.String) {
- return (java.lang.String) ref;
- } else {
- org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
- (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
- java.lang.String s = bs.toStringUtf8();
- fileName_ = s;
- return s;
- }
- }
- /**
- * <pre>
- * The path-qualified name of the .proto file that contained the associated
- * protobuf element. For example: `"google/protobuf/source_context.proto"`.
- * </pre>
- *
- * <code>string file_name = 1;</code>
- */
- public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
- getFileNameBytes() {
- java.lang.Object ref = fileName_;
- if (ref instanceof java.lang.String) {
- org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
- org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
- (java.lang.String) ref);
- fileName_ = b;
- return b;
- } else {
- return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
- }
- }
-
- private byte memoizedIsInitialized = -1;
- public final boolean isInitialized() {
- byte isInitialized = memoizedIsInitialized;
- if (isInitialized == 1) return true;
- if (isInitialized == 0) return false;
-
- memoizedIsInitialized = 1;
- return true;
- }
-
- public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
- throws java.io.IOException {
- if (!getFileNameBytes().isEmpty()) {
- org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, fileName_);
- }
- }
-
- public int getSerializedSize() {
- int size = memoizedSize;
- if (size != -1) return size;
-
- size = 0;
- if (!getFileNameBytes().isEmpty()) {
- size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(1, fileName_);
- }
- memoizedSize = size;
- return size;
- }
-
- private static final long serialVersionUID = 0L;
- @java.lang.Override
- public boolean equals(final java.lang.Object obj) {
- if (obj == this) {
- return true;
- }
- if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext)) {
- return super.equals(obj);
- }
- org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext) obj;
-
- boolean result = true;
- result = result && getFileName()
- .equals(other.getFileName());
- return result;
- }
-
- @java.lang.Override
- public int hashCode() {
- if (memoizedHashCode != 0) {
- return memoizedHashCode;
- }
- int hash = 41;
- hash = (19 * hash) + getDescriptor().hashCode();
- hash = (37 * hash) + FILE_NAME_FIELD_NUMBER;
- hash = (53 * hash) + getFileName().hashCode();
- hash = (29 * hash) + unknownFields.hashCode();
- memoizedHashCode = hash;
- return hash;
- }
-
- public static org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext parseFrom(
- org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
- throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data);
- }
- public static org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext parseFrom(
- org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
- org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data, extensionRegistry);
- }
- public static org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext parseFrom(byte[] data)
- throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data);
- }
- public static org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext parseFrom(
- byte[] data,
- org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data, extensionRegistry);
- }
- public static org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext parseFrom(java.io.InputStream input)
- throws java.io.IOException {
- return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
- .parseWithIOException(PARSER, input);
- }
- public static org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext parseFrom(
- java.io.InputStream input,
- org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
- .parseWithIOException(PARSER, input, extensionRegistry);
- }
- public static org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext parseDelimitedFrom(java.io.InputStream input)
- throws java.io.IOException {
- return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
- .parseDelimitedWithIOException(PARSER, input);
- }
- public static org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext parseDelimitedFrom(
- java.io.InputStream input,
- org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
- .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
- }
- public static org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext parseFrom(
- org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
- throws java.io.IOException {
- return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
- .parseWithIOException(PARSER, input);
- }
- public static org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext parseFrom(
- org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
- org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
- .parseWithIOException(PARSER, input, extensionRegistry);
- }
-
- public Builder newBuilderForType() { return newBuilder(); }
- public static Builder newBuilder() {
- return DEFAULT_INSTANCE.toBuilder();
- }
- public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext prototype) {
- return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
- }
- public Builder toBuilder() {
- return this == DEFAULT_INSTANCE
- ? new Builder() : new Builder().mergeFrom(this);
- }
-
- @java.lang.Override
- protected Builder newBuilderForType(
- org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
- Builder builder = new Builder(parent);
- return builder;
- }
- /**
- * <pre>
- * `SourceContext` represents information about the source of a
- * protobuf element, like the file in which it is defined.
- * </pre>
- *
- * Protobuf type {@code google.protobuf.SourceContext}
- */
- public static final class Builder extends
- org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
- // @@protoc_insertion_point(builder_implements:google.protobuf.SourceContext)
- org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContextOrBuilder {
- public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContextProto.internal_static_google_protobuf_SourceContext_descriptor;
- }
-
- protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContextProto.internal_static_google_protobuf_SourceContext_fieldAccessorTable
- .ensureFieldAccessorsInitialized(
- org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext.Builder.class);
- }
-
- // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext.newBuilder()
- private Builder() {
- maybeForceBuilderInitialization();
- }
-
- private Builder(
- org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
- super(parent);
- maybeForceBuilderInitialization();
- }
- private void maybeForceBuilderInitialization() {
- if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
- .alwaysUseFieldBuilders) {
- }
- }
- public Builder clear() {
- super.clear();
- fileName_ = "";
-
- return this;
- }
-
- public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
- getDescriptorForType() {
- return org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContextProto.internal_static_google_protobuf_SourceContext_descriptor;
- }
-
- public org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext getDefaultInstanceForType() {
- return org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext.getDefaultInstance();
- }
-
- public org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext build() {
- org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext result = buildPartial();
- if (!result.isInitialized()) {
- throw newUninitializedMessageException(result);
- }
- return result;
- }
-
- public org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext buildPartial() {
- org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext(this);
- result.fileName_ = fileName_;
- onBuilt();
- return result;
- }
-
- public Builder clone() {
- return (Builder) super.clone();
- }
- public Builder setField(
- org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
- Object value) {
- return (Builder) super.setField(field, value);
- }
- public Builder clearField(
- org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
- return (Builder) super.clearField(field);
- }
- public Builder clearOneof(
- org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
- return (Builder) super.clearOneof(oneof);
- }
- public Builder setRepeatedField(
- org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
- int index, Object value) {
- return (Builder) super.setRepeatedField(field, index, value);
- }
- public Builder addRepeatedField(
- org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
- Object value) {
- return (Builder) super.addRepeatedField(field, value);
- }
- public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
- if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext) {
- return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext)other);
- } else {
- super.mergeFrom(other);
- return this;
- }
- }
-
- public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext other) {
- if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext.getDefaultInstance()) return this;
- if (!other.getFileName().isEmpty()) {
- fileName_ = other.fileName_;
- onChanged();
- }
- onChanged();
- return this;
- }
-
- public final boolean isInitialized() {
- return true;
- }
-
- public Builder mergeFrom(
- org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
- org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext parsedMessage = null;
- try {
- parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
- } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
- parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext) e.getUnfinishedMessage();
- throw e.unwrapIOException();
- } finally {
- if (parsedMessage != null) {
- mergeFrom(parsedMessage);
- }
- }
- return this;
- }
-
- private java.lang.Object fileName_ = "";
- /**
- * <pre>
- * The path-qualified name of the .proto file that contained the associated
- * protobuf element. For example: `"google/protobuf/source_context.proto"`.
- * </pre>
- *
- * <code>string file_name = 1;</code>
- */
- public java.lang.String getFileName() {
- java.lang.Object ref = fileName_;
- if (!(ref instanceof java.lang.String)) {
- org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
- (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
- java.lang.String s = bs.toStringUtf8();
- fileName_ = s;
- return s;
- } else {
- return (java.lang.String) ref;
- }
- }
- /**
- * <pre>
- * The path-qualified name of the .proto file that contained the associated
- * protobuf element. For example: `"google/protobuf/source_context.proto"`.
- * </pre>
- *
- * <code>string file_name = 1;</code>
- */
- public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
- getFileNameBytes() {
- java.lang.Object ref = fileName_;
- if (ref instanceof String) {
- org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
- org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
- (java.lang.String) ref);
- fileName_ = b;
- return b;
- } else {
- return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
- }
- }
- /**
- * <pre>
- * The path-qualified name of the .proto file that contained the associated
- * protobuf element. For example: `"google/protobuf/source_context.proto"`.
- * </pre>
- *
- * <code>string file_name = 1;</code>
- */
- public Builder setFileName(
- java.lang.String value) {
- if (value == null) {
- throw new NullPointerException();
- }
-
- fileName_ = value;
- onChanged();
- return this;
- }
- /**
- * <pre>
- * The path-qualified name of the .proto file that contained the associated
- * protobuf element. For example: `"google/protobuf/source_context.proto"`.
- * </pre>
- *
- * <code>string file_name = 1;</code>
- */
- public Builder clearFileName() {
-
- fileName_ = getDefaultInstance().getFileName();
- onChanged();
- return this;
- }
- /**
- * <pre>
- * The path-qualified name of the .proto file that contained the associated
- * protobuf element. For example: `"google/protobuf/source_context.proto"`.
- * </pre>
- *
- * <code>string file_name = 1;</code>
- */
- public Builder setFileNameBytes(
- org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) {
- if (value == null) {
- throw new NullPointerException();
- }
- checkByteStringIsUtf8(value);
-
- fileName_ = value;
- onChanged();
- return this;
- }
- public final Builder setUnknownFields(
- final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
- return this;
- }
-
- public final Builder mergeUnknownFields(
- final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
- return this;
- }
-
-
- // @@protoc_insertion_point(builder_scope:google.protobuf.SourceContext)
- }
-
- // @@protoc_insertion_point(class_scope:google.protobuf.SourceContext)
- private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext DEFAULT_INSTANCE;
- static {
- DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext();
- }
-
- public static org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext getDefaultInstance() {
- return DEFAULT_INSTANCE;
- }
-
- private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<SourceContext>
- PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<SourceContext>() {
- public SourceContext parsePartialFrom(
- org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
- org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
- return new SourceContext(input, extensionRegistry);
- }
- };
-
- public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<SourceContext> parser() {
- return PARSER;
- }
-
- @java.lang.Override
- public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<SourceContext> getParserForType() {
- return PARSER;
- }
-
- public org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext getDefaultInstanceForType() {
- return DEFAULT_INSTANCE;
- }
-
-}
-
http://git-wip-us.apache.org/repos/asf/hbase/blob/df93c13f/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/SourceContextOrBuilder.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/SourceContextOrBuilder.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/SourceContextOrBuilder.java
deleted file mode 100644
index c35eceb..0000000
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/SourceContextOrBuilder.java
+++ /dev/null
@@ -1,29 +0,0 @@
-// Generated by the protocol buffer compiler. DO NOT EDIT!
-// source: google/protobuf/source_context.proto
-
-package org.apache.hadoop.hbase.shaded.com.google.protobuf;
-
-public interface SourceContextOrBuilder extends
- // @@protoc_insertion_point(interface_extends:google.protobuf.SourceContext)
- org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
-
- /**
- * <pre>
- * The path-qualified name of the .proto file that contained the associated
- * protobuf element. For example: `"google/protobuf/source_context.proto"`.
- * </pre>
- *
- * <code>string file_name = 1;</code>
- */
- java.lang.String getFileName();
- /**
- * <pre>
- * The path-qualified name of the .proto file that contained the associated
- * protobuf element. For example: `"google/protobuf/source_context.proto"`.
- * </pre>
- *
- * <code>string file_name = 1;</code>
- */
- org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
- getFileNameBytes();
-}
http://git-wip-us.apache.org/repos/asf/hbase/blob/df93c13f/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/SourceContextProto.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/SourceContextProto.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/SourceContextProto.java
deleted file mode 100644
index 48f2e3f..0000000
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/SourceContextProto.java
+++ /dev/null
@@ -1,60 +0,0 @@
-// Generated by the protocol buffer compiler. DO NOT EDIT!
-// source: google/protobuf/source_context.proto
-
-package org.apache.hadoop.hbase.shaded.com.google.protobuf;
-
-public final class SourceContextProto {
- private SourceContextProto() {}
- public static void registerAllExtensions(
- org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite registry) {
- }
-
- public static void registerAllExtensions(
- org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry registry) {
- registerAllExtensions(
- (org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite) registry);
- }
- static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
- internal_static_google_protobuf_SourceContext_descriptor;
- static final
- org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
- internal_static_google_protobuf_SourceContext_fieldAccessorTable;
-
- public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
- getDescriptor() {
- return descriptor;
- }
- private static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
- descriptor;
- static {
- java.lang.String[] descriptorData = {
- "\n$google/protobuf/source_context.proto\022\017" +
- "google.protobuf\"\"\n\rSourceContext\022\021\n\tfile" +
- "_name\030\001 \001(\tB\225\001\n\023com.google.protobufB\022Sou" +
- "rceContextProtoP\001ZAgoogle.golang.org/gen" +
- "proto/protobuf/source_context;source_con" +
- "text\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTy" +
- "pesb\006proto3"
- };
- org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
- new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() {
- public org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry assignDescriptors(
- org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor root) {
- descriptor = root;
- return null;
- }
- };
- org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
- .internalBuildGeneratedFileFrom(descriptorData,
- new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor[] {
- }, assigner);
- internal_static_google_protobuf_SourceContext_descriptor =
- getDescriptor().getMessageTypes().get(0);
- internal_static_google_protobuf_SourceContext_fieldAccessorTable = new
- org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
- internal_static_google_protobuf_SourceContext_descriptor,
- new java.lang.String[] { "FileName", });
- }
-
- // @@protoc_insertion_point(outer_class_scope)
-}