You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by st...@apache.org on 2017/06/23 22:13:39 UTC
hbase-thirdparty git commit: Add doc. and submodules -- one for
protobuf so we can patch and then shade, and then another to do all the rest
Repository: hbase-thirdparty
Updated Branches:
refs/heads/master f17a8270c -> 8b127cb75
Add doc. and submodules -- one for protobuf so we can patch and then shade, and then another to do all the rest
Project: http://git-wip-us.apache.org/repos/asf/hbase-thirdparty/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-thirdparty/commit/8b127cb7
Tree: http://git-wip-us.apache.org/repos/asf/hbase-thirdparty/tree/8b127cb7
Diff: http://git-wip-us.apache.org/repos/asf/hbase-thirdparty/diff/8b127cb7
Branch: refs/heads/master
Commit: 8b127cb750a705e616910c6b85a4fb85f3289357
Parents: f17a827
Author: Michael Stack <st...@apache.org>
Authored: Fri Jun 23 15:13:30 2017 -0700
Committer: Michael Stack <st...@apache.org>
Committed: Fri Jun 23 15:13:30 2017 -0700
----------------------------------------------------------------------
README.txt | 18 +-
.../dependency-reduced-pom.xml | 125 ++
hbase-shaded-protobuf/pom.xml | 178 +++
.../src/main/patches/HBASE-15789_V2.patch | 1262 ++++++++++++++++++
.../src/main/patches/HBASE-17087.patch | 14 +
.../src/main/patches/HBASE-17239.patch | 44 +
hbase-shaded-thirdparty/pom.xml | 120 ++
pom.xml | 263 ++--
8 files changed, 1852 insertions(+), 172 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase-thirdparty/blob/8b127cb7/README.txt
----------------------------------------------------------------------
diff --git a/README.txt b/README.txt
index f025d66..e6c0e83 100644
--- a/README.txt
+++ b/README.txt
@@ -1,11 +1,11 @@
-This project contains relocated third-party libraries used by Apache HBase
+This project contains relocated third-party libraries used by Apache HBase.
-Includes protobuf-util, netty-all, gson, and guava offset so they have
-an org.apache.hadoop.hbase.shaded prefix on class name. See the pom.xml
-for the explicit version of each third-party lib included.
+We have two submodules, one to patch and then relocate (shade) protobuf. The other
+module relocates a bundle of other (unpatched) libs used by hbase. This latter
+set includes protobuf-util, netty-all, gson, and guava.
-We do not include protobuf here because we need to patch it. Patching
-involves unpack of src, patch, and then re-jarring. We can do this
-if only protobuf is involved. Unpack of protobuf plus the above and
-then trying to compile fails because src is missing for some dependency.
-See hbase-protocol-shaded/pom.xml for how it does patch of protobuf.
+All shading is done using the same relocation offset of
+org.apache.hadoop.hbase.shaded; we add this prefix to the relocated thirdparty
+library class names.
+
+See the pom.xml for the explicit version of each third-party lib included.
http://git-wip-us.apache.org/repos/asf/hbase-thirdparty/blob/8b127cb7/hbase-shaded-protobuf/dependency-reduced-pom.xml
----------------------------------------------------------------------
diff --git a/hbase-shaded-protobuf/dependency-reduced-pom.xml b/hbase-shaded-protobuf/dependency-reduced-pom.xml
new file mode 100644
index 0000000..443bc26
--- /dev/null
+++ b/hbase-shaded-protobuf/dependency-reduced-pom.xml
@@ -0,0 +1,125 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <parent>
+ <artifactId>hbase-thirdparty</artifactId>
+ <groupId>org.apache.hbase.thirdparty</groupId>
+ <version>1.0.0-SNAPSHOT</version>
+ </parent>
+ <modelVersion>4.0.0</modelVersion>
+ <artifactId>hbase-shaded-protobuf</artifactId>
+ <name>Apache HBase Patched & Relocated (Shaded) Protobuf</name>
+ <description>Pulls down protobuf, patches it, relocates/shades, and then bundles it up in a new jar.</description>
+ <build>
+ <plugins>
+ <plugin>
+ <artifactId>maven-source-plugin</artifactId>
+ </plugin>
+ <plugin>
+ <artifactId>maven-clean-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>pre-compile-protoc</id>
+ <phase>generate-sources</phase>
+ <goals>
+ <goal>clean</goal>
+ </goals>
+ </execution>
+ </executions>
+ <configuration>
+ <filesets>
+ <fileset>
+ <directory>${basedir}/src/main/java</directory>
+ <includes>
+ <include>**/**</include>
+ </includes>
+ <followSymlinks>false</followSymlinks>
+ </fileset>
+ </filesets>
+ </configuration>
+ </plugin>
+ <plugin>
+ <artifactId>maven-jar-plugin</artifactId>
+ <version>3.0.2</version>
+ <configuration>
+ <finalName>${jar.finalName}</finalName>
+ </configuration>
+ </plugin>
+ <plugin>
+ <artifactId>maven-shade-plugin</artifactId>
+ <version>3.0.0</version>
+ <executions>
+ <execution>
+ <phase>package</phase>
+ <goals>
+ <goal>shade</goal>
+ </goals>
+ <configuration>
+ <shadeSourcesContent>true</shadeSourcesContent>
+ <createSourcesJar>true</createSourcesJar>
+ <relocations>
+ <relocation>
+ <pattern>com.google</pattern>
+ <shadedPattern>${rename.offset}.com.google</shadedPattern>
+ </relocation>
+ </relocations>
+ <artifactSet>
+ <excludes />
+ </artifactSet>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <artifactId>maven-dependency-plugin</artifactId>
+ <version>2.10</version>
+ <executions>
+ <execution>
+ <id>unpack</id>
+ <phase>package</phase>
+ <goals>
+ <goal>unpack</goal>
+ </goals>
+ <configuration>
+ <artifactItems>
+ <artifactItem>
+ <groupId>${project.groupId}</groupId>
+ <artifactId>${project.artifactId}</artifactId>
+ <version>${project.version}</version>
+ <classifier>sources</classifier>
+ <type>jar</type>
+ <overWrite>true</overWrite>
+ <outputDirectory>${basedir}/src/main/java</outputDirectory>
+ <includes>**/*.java</includes>
+ </artifactItem>
+ </artifactItems>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <artifactId>maven-patch-plugin</artifactId>
+ <version>1.2</version>
+ <executions>
+ <execution>
+ <id>patch</id>
+ <phase>package</phase>
+ <goals>
+ <goal>apply</goal>
+ </goals>
+ <configuration>
+ <strip>1</strip>
+ <patchDirectory>src/main/patches</patchDirectory>
+ <patchTrackingFile>${project.build.directory}/patches-applied.txt</patchTrackingFile>
+ <naturalOrderProcessing>true</naturalOrderProcessing>
+ </configuration>
+ </execution>
+ </executions>
+ <configuration>
+ <targetDirectory>${basedir}</targetDirectory>
+ <skipApplication>false</skipApplication>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+</project>
+
http://git-wip-us.apache.org/repos/asf/hbase-thirdparty/blob/8b127cb7/hbase-shaded-protobuf/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-shaded-protobuf/pom.xml b/hbase-shaded-protobuf/pom.xml
new file mode 100644
index 0000000..0868c93
--- /dev/null
+++ b/hbase-shaded-protobuf/pom.xml
@@ -0,0 +1,178 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+ ON MVN COMPILE NOT WORKING
+
+ If you are wondering why 'mvn compile' does not work building HBase
+ (in particular, if you are doing it for the first time), instead do
+ 'mvn package'. If you are interested in the full story, see
+ https://issues.apache.org/jira/browse/HBASE-6795.
+
+-->
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.hbase.thirdparty</groupId>
+ <artifactId>hbase-thirdparty</artifactId>
+ <version>1.0.0-SNAPSHOT</version>
+ <relativePath>..</relativePath>
+ </parent>
+ <artifactId>hbase-shaded-protobuf</artifactId>
+ <name>Apache HBase Patched & Relocated (Shaded) Protobuf</name>
+ <description>
+ Pulls down protobuf, patches it, relocates/shades, and then bundles it up in a new jar.
+ </description>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-source-plugin</artifactId>
+ </plugin>
+ <plugin>
+ <artifactId>maven-clean-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>pre-compile-protoc</id>
+ <phase>generate-sources</phase>
+ <goals>
+ <goal>clean</goal>
+ </goals>
+ </execution>
+ </executions>
+ <configuration>
+ <filesets>
+ <fileset>
+ <directory>${basedir}/src/main/java</directory>
+ <includes>
+ <include>**/**</include>
+ </includes>
+ <followSymlinks>false</followSymlinks>
+ </fileset>
+ </filesets>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-jar-plugin</artifactId>
+ <version>3.0.2</version>
+ <configuration>
+ <finalName>${jar.finalName}</finalName>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-shade-plugin</artifactId>
+ <version>3.0.0</version>
+ <executions>
+ <execution>
+ <phase>package</phase>
+ <goals>
+ <goal>shade</goal>
+ </goals>
+ <configuration>
+ <shadeSourcesContent>true</shadeSourcesContent>
+ <createSourcesJar>true</createSourcesJar>
+ <relocations>
+ <relocation>
+ <pattern>com.google</pattern>
+ <shadedPattern>${rename.offset}.com.google</shadedPattern>
+ </relocation>
+ </relocations>
+ <artifactSet>
+ <excludes>
+ <!--Exclude protobuf itself. We get a patched version later over in hbase core
+ <exclude>com.google.protobuf:protobuf-java</exclude>
+ <exclude>com.google.code.findbugs:jsr305</exclude>
+ <exclude>com.google.errorprone:error_prone_annotations</exclude>
+ <exclude>com.google.j2objc:j2objc-annotations</exclude>
+ <exclude>org.codehaus.mojo:animal-sniffer-annotations</exclude>
+ <exclude>org.codehaus.mojo:animal-sniffer-annotations</exclude>
+ -->
+ </excludes>
+ </artifactSet>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-dependency-plugin</artifactId>
+ <version>2.10</version>
+ <executions>
+ <execution>
+ <id>unpack</id>
+ <phase>package</phase>
+ <goals>
+ <goal>unpack</goal>
+ </goals>
+ <configuration>
+ <artifactItems>
+ <artifactItem>
+ <groupId>${project.groupId}</groupId>
+ <artifactId>${project.artifactId}</artifactId>
+ <version>${project.version}</version>
+ <classifier>sources</classifier>
+ <type>jar</type>
+ <overWrite>true</overWrite>
+ <outputDirectory>${basedir}/src/main/java</outputDirectory>
+ <includes>**/*.java</includes>
+ </artifactItem>
+ </artifactItems>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-patch-plugin</artifactId>
+ <version>1.2</version>
+ <configuration>
+ <!--Patches are made at top-level-->
+ <targetDirectory>${basedir}</targetDirectory>
+ <skipApplication>false</skipApplication>
+ </configuration>
+ <executions>
+ <execution>
+ <id>patch</id>
+ <configuration>
+ <strip>1</strip>
+ <patchDirectory>src/main/patches</patchDirectory>
+ <patchTrackingFile>${project.build.directory}/patches-applied.txt</patchTrackingFile>
+ <naturalOrderProcessing>true</naturalOrderProcessing>
+ </configuration>
+ <phase>package</phase>
+ <goals>
+ <!--This should run after the above unpack phase-->
+ <goal>apply</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+ <dependencies>
+ <dependency>
+ <groupId>com.google.protobuf</groupId>
+ <artifactId>protobuf-java</artifactId>
+ <version>${protobuf.version}</version>
+ </dependency>
+ </dependencies>
+</project>
http://git-wip-us.apache.org/repos/asf/hbase-thirdparty/blob/8b127cb7/hbase-shaded-protobuf/src/main/patches/HBASE-15789_V2.patch
----------------------------------------------------------------------
diff --git a/hbase-shaded-protobuf/src/main/patches/HBASE-15789_V2.patch b/hbase-shaded-protobuf/src/main/patches/HBASE-15789_V2.patch
new file mode 100644
index 0000000..a8116d5
--- /dev/null
+++ b/hbase-shaded-protobuf/src/main/patches/HBASE-15789_V2.patch
@@ -0,0 +1,1262 @@
+diff --git a/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ByteBufferWriter.java b/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ByteBufferWriter.java
+index c0ed636..906c216 100644
+--- a/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ByteBufferWriter.java
++++ b/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ByteBufferWriter.java
+@@ -112,7 +112,7 @@ final class ByteBufferWriter {
+ }
+ }
+
+- private static byte[] getOrCreateBuffer(int requestedSize) {
++ static byte[] getOrCreateBuffer(int requestedSize) {
+ requestedSize = max(requestedSize, MIN_CACHED_BUFFER_SIZE);
+
+ byte[] buffer = getBuffer();
+diff --git a/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ByteInput.java b/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ByteInput.java
+new file mode 100644
+index 0000000..a745d37
+--- /dev/null
++++ b/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ByteInput.java
+@@ -0,0 +1,81 @@
++// Protocol Buffers - Google's data interchange format
++// Copyright 2008 Google Inc. All rights reserved.
++// https://developers.google.com/protocol-buffers/
++//
++// Redistribution and use in source and binary forms, with or without
++// modification, are permitted provided that the following conditions are
++// met:
++//
++// * Redistributions of source code must retain the above copyright
++// notice, this list of conditions and the following disclaimer.
++// * Redistributions in binary form must reproduce the above
++// copyright notice, this list of conditions and the following disclaimer
++// in the documentation and/or other materials provided with the
++// distribution.
++// * Neither the name of Google Inc. nor the names of its
++// contributors may be used to endorse or promote products derived from
++// this software without specific prior written permission.
++//
++// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
++// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
++// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
++// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
++// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
++// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
++// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
++// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
++// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
++// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
++// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
++package org.apache.hadoop.hbase.shaded.com.google.protobuf;
++
++import java.io.IOException;
++import java.nio.ByteBuffer;
++
++/**
++ * An input for raw bytes. This is similar to an InputStream but it is offset addressable. All the
++ * read APIs are relative.
++ */
++@ExperimentalApi
++public abstract class ByteInput {
++
++ /**
++ * Reads a single byte from the given offset.
++ * @param offset The offset from where byte to be read
++ * @return The byte of data at given offset
++ */
++ public abstract byte read(int offset);
++
++ /**
++ * Reads bytes of data from the given offset into an array of bytes.
++ * @param offset The src offset within this ByteInput from where data to be read.
++ * @param b Destination byte array to read data into.
++ * @return The number of bytes read from ByteInput
++ */
++ public int read(int offset, byte b[]) throws IOException {
++ return read(offset, b, 0, b.length);
++ }
++
++ /**
++ * Reads up to <code>len</code> bytes of data from the given offset into an array of bytes.
++ * @param offset The src offset within this ByteInput from where data to be read.
++ * @param out Destination byte array to read data into.
++ * @param outOffset Offset within the out byte[] where data to be read into.
++ * @param len The number of bytes to read.
++ * @return The number of bytes read from ByteInput
++ */
++ public abstract int read(int offset, byte[] out, int outOffset, int len);
++
++ /**
++ * Reads bytes of data from the given offset into given {@link ByteBuffer}.
++ * @param offset The src offset within this ByteInput from where data to be read.
++ * @param out Destination {@link ByteBuffer} to read data into.
++ * @return The number of bytes read from ByteInput
++ */
++ public abstract int read(int offset, ByteBuffer out);
++
++ /**
++ * @return Total number of bytes in this ByteInput.
++ */
++ public abstract int size();
++}
+diff --git a/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ByteInputByteString.java b/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ByteInputByteString.java
+new file mode 100644
+index 0000000..1949602
+--- /dev/null
++++ b/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ByteInputByteString.java
+@@ -0,0 +1,249 @@
++// Protocol Buffers - Google's data interchange format
++// Copyright 2008 Google Inc. All rights reserved.
++// https://developers.google.com/protocol-buffers/
++//
++// Redistribution and use in source and binary forms, with or without
++// modification, are permitted provided that the following conditions are
++// met:
++//
++// * Redistributions of source code must retain the above copyright
++// notice, this list of conditions and the following disclaimer.
++// * Redistributions in binary form must reproduce the above
++// copyright notice, this list of conditions and the following disclaimer
++// in the documentation and/or other materials provided with the
++// distribution.
++// * Neither the name of Google Inc. nor the names of its
++// contributors may be used to endorse or promote products derived from
++// this software without specific prior written permission.
++//
++// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
++// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
++// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
++// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
++// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
++// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
++// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
++// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
++// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
++// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
++// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
++
++package org.apache.hadoop.hbase.shaded.com.google.protobuf;
++
++import java.io.IOException;
++import java.io.InputStream;
++import java.io.InvalidObjectException;
++import java.io.ObjectInputStream;
++import java.io.OutputStream;
++import java.nio.ByteBuffer;
++import java.nio.charset.Charset;
++import java.util.Arrays;
++import java.util.Collections;
++import java.util.List;
++
++/**
++ * A {@link ByteString} that wraps around a {@link ByteInput}.
++ */
++final class ByteInputByteString extends ByteString.LeafByteString {
++ private final ByteInput buffer;
++ private final int offset, length;
++
++ ByteInputByteString(ByteInput buffer, int offset, int length) {
++ if (buffer == null) {
++ throw new NullPointerException("buffer");
++ }
++ this.buffer = buffer;
++ this.offset = offset;
++ this.length = length;
++ }
++
++ // =================================================================
++ // Serializable
++
++ /**
++ * Magic method that lets us override serialization behavior.
++ */
++ private Object writeReplace() {
++ return ByteString.wrap(toByteArray());
++ }
++
++ /**
++ * Magic method that lets us override deserialization behavior.
++ */
++ private void readObject(@SuppressWarnings("unused") ObjectInputStream in) throws IOException {
++ throw new InvalidObjectException("ByteInputByteString instances are not to be serialized directly");// TODO check here
++ }
++
++ // =================================================================
++
++ @Override
++ public byte byteAt(int index) {
++ return buffer.read(getAbsoluteOffset(index));
++ }
++
++ private int getAbsoluteOffset(int relativeOffset) {
++ return this.offset + relativeOffset;
++ }
++
++ @Override
++ public int size() {
++ return length;
++ }
++
++ @Override
++ public ByteString substring(int beginIndex, int endIndex) {
++ if (beginIndex < 0 || beginIndex >= size() || endIndex < beginIndex || endIndex >= size()) {
++ throw new IllegalArgumentException(
++ String.format("Invalid indices [%d, %d]", beginIndex, endIndex));
++ }
++ return new ByteInputByteString(this.buffer, getAbsoluteOffset(beginIndex), endIndex - beginIndex);
++ }
++
++ @Override
++ protected void copyToInternal(
++ byte[] target, int sourceOffset, int targetOffset, int numberToCopy) {
++ this.buffer.read(getAbsoluteOffset(sourceOffset), target, targetOffset, numberToCopy);
++ }
++
++ @Override
++ public void copyTo(ByteBuffer target) {
++ this.buffer.read(this.offset, target);
++ }
++
++ @Override
++ public void writeTo(OutputStream out) throws IOException {
++ out.write(toByteArray());// TODO
++ }
++
++ @Override
++ boolean equalsRange(ByteString other, int offset, int length) {
++ return substring(0, length).equals(other.substring(offset, offset + length));
++ }
++
++ @Override
++ void writeToInternal(OutputStream out, int sourceOffset, int numberToWrite) throws IOException {
++ byte[] buf = ByteBufferWriter.getOrCreateBuffer(numberToWrite);
++ this.buffer.read(getAbsoluteOffset(sourceOffset), buf, 0, numberToWrite);
++ out.write(buf, 0, numberToWrite);
++ }
++
++ @Override
++ void writeTo(ByteOutput output) throws IOException {
++ output.writeLazy(toByteArray(), 0, length);
++ }
++
++ @Override
++ public ByteBuffer asReadOnlyByteBuffer() {
++ return ByteBuffer.wrap(toByteArray()).asReadOnlyBuffer();
++ }
++
++ @Override
++ public List<ByteBuffer> asReadOnlyByteBufferList() {
++ return Collections.singletonList(asReadOnlyByteBuffer());
++ }
++
++ @Override
++ protected String toStringInternal(Charset charset) {
++ byte[] bytes = toByteArray();
++ return new String(bytes, 0, bytes.length, charset);
++ }
++
++ @Override
++ public boolean isValidUtf8() {
++ return Utf8.isValidUtf8(buffer, offset, offset + length);
++ }
++
++ @Override
++ protected int partialIsValidUtf8(int state, int offset, int length) {
++ int off = getAbsoluteOffset(offset);
++ return Utf8.partialIsValidUtf8(state, buffer, off, off + length);
++ }
++
++ @Override
++ public boolean equals(Object other) {
++ if (other == this) {
++ return true;
++ }
++ if (!(other instanceof ByteString)) {
++ return false;
++ }
++ ByteString otherString = ((ByteString) other);
++ if (size() != otherString.size()) {
++ return false;
++ }
++ if (size() == 0) {
++ return true;
++ }
++ if (other instanceof RopeByteString) {
++ return other.equals(this);
++ }
++ return Arrays.equals(this.toByteArray(), otherString.toByteArray());
++ }
++
++ @Override
++ protected int partialHash(int h, int offset, int length) {
++ offset = getAbsoluteOffset(offset);
++ int end = offset + length;
++ for (int i = offset; i < end; i++) {
++ h = h * 31 + buffer.read(i);
++ }
++ return h;
++ }
++
++ @Override
++ public InputStream newInput() {
++ return new InputStream() {
++ private final ByteInput buf = buffer;
++ private int pos = offset;
++ private int limit = pos + length;
++ private int mark = pos;
++
++ @Override
++ public void mark(int readlimit) {
++ this.mark = readlimit;
++ }
++
++ @Override
++ public boolean markSupported() {
++ return true;
++ }
++
++ @Override
++ public void reset() throws IOException {
++ this.pos = this.mark;
++ }
++
++ @Override
++ public int available() throws IOException {
++ return this.limit - this.pos;
++ }
++
++ @Override
++ public int read() throws IOException {
++ if (available() <= 0) {
++ return -1;
++ }
++ return this.buf.read(pos++) & 0xFF;
++ }
++
++ @Override
++ public int read(byte[] bytes, int off, int len) throws IOException {
++ int remain = available();
++ if (remain <= 0) {
++ return -1;
++ }
++ len = Math.min(len, remain);
++ buf.read(pos, bytes, off, len);
++ pos += len;
++ return len;
++ }
++ };
++ }
++
++ @Override
++ public CodedInputStream newCodedInput() {
++ // We trust CodedInputStream not to modify the bytes, or to give anyone
++ // else access to them.
++ return CodedInputStream.newInstance(buffer, offset, length, true);
++ }
++}
+diff --git a/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ByteString.java b/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ByteString.java
+index 8cae888..ebfa7fa 100644
+--- a/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ByteString.java
++++ b/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ByteString.java
+@@ -323,6 +323,13 @@ public abstract class ByteString implements Iterable<Byte>, Serializable {
+ }
+
+ /**
++ * Wraps the given bytes into a {@code ByteString}. Intended for internal only usage.
++ */
++ static ByteString wrap(ByteInput buffer, int offset, int length) {
++ return new ByteInputByteString(buffer, offset, length);
++ }
++
++ /**
+ * Wraps the given bytes into a {@code ByteString}. Intended for internal only
+ * usage to force a classload of ByteString before LiteralByteString.
+ */
+diff --git a/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/CodedInputStream.java b/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/CodedInputStream.java
+index 12d70ce..e4c8e05 100644
+--- a/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/CodedInputStream.java
++++ b/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/CodedInputStream.java
+@@ -149,6 +149,15 @@ public abstract class CodedInputStream {
+ return newInstance(buffer, 0, buffer.length, true);
+ }
+
++ /** Create a new CodedInputStream wrapping the given {@link ByteInput}. */
++ public static CodedInputStream newInstance(ByteInput buf, boolean bufferIsImmutable) {
++ return new ByteInputDecoder(buf, bufferIsImmutable);
++ }
++
++ public static CodedInputStream newInstance(ByteInput buf, int off, int len, boolean bufferIsImmutable) {
++ return new ByteInputDecoder(buf, off, len, bufferIsImmutable);
++ }
++
+ /** Disable construction/inheritance outside of this class. */
+ private CodedInputStream() {}
+
+@@ -2892,4 +2901,652 @@ public abstract class CodedInputStream {
+ pos = size - tempPos;
+ }
+ }
++
++ private static final class ByteInputDecoder extends CodedInputStream {
++
++ private final ByteInput buffer;
++ private final boolean immutable;
++ private int limit;
++ private int bufferSizeAfterLimit;
++ private int pos;
++ private int startPos;
++ private int lastTag;
++ private boolean enableAliasing;
++
++ /** The absolute position of the end of the current message. */
++ private int currentLimit = Integer.MAX_VALUE;
++
++ private ByteInputDecoder(ByteInput buffer, boolean immutable) {
++ this(buffer, 0, buffer.size(), immutable);
++ }
++
++ private ByteInputDecoder(ByteInput buffer, int off, int len, boolean immutable) {
++ this.buffer = buffer;
++ pos = off;
++ limit = off + len;
++ startPos = pos;
++ this.immutable = immutable;
++ }
++
++ @Override
++ public int readTag() throws IOException {
++ if (isAtEnd()) {
++ lastTag = 0;
++ return 0;
++ }
++
++ lastTag = readRawVarint32();
++ if (WireFormat.getTagFieldNumber(lastTag) == 0) {
++ // If we actually read zero (or any tag number corresponding to field
++ // number zero), that's not a valid tag.
++ throw InvalidProtocolBufferException.invalidTag();
++ }
++ return lastTag;
++ }
++
++ @Override
++ public void checkLastTagWas(int value) throws InvalidProtocolBufferException {
++ if (lastTag != value) {
++ throw InvalidProtocolBufferException.invalidEndTag();
++ }
++ }
++
++ @Override
++ public int getLastTag() {
++ return lastTag;
++ }
++
++ @Override
++ public boolean skipField(int tag) throws IOException {
++ switch (WireFormat.getTagWireType(tag)) {
++ case WireFormat.WIRETYPE_VARINT:
++ skipRawVarint();
++ return true;
++ case WireFormat.WIRETYPE_FIXED64:
++ skipRawBytes(FIXED_64_SIZE);
++ return true;
++ case WireFormat.WIRETYPE_LENGTH_DELIMITED:
++ skipRawBytes(readRawVarint32());
++ return true;
++ case WireFormat.WIRETYPE_START_GROUP:
++ skipMessage();
++ checkLastTagWas(
++ WireFormat.makeTag(WireFormat.getTagFieldNumber(tag), WireFormat.WIRETYPE_END_GROUP));
++ return true;
++ case WireFormat.WIRETYPE_END_GROUP:
++ return false;
++ case WireFormat.WIRETYPE_FIXED32:
++ skipRawBytes(FIXED_32_SIZE);
++ return true;
++ default:
++ throw InvalidProtocolBufferException.invalidWireType();
++ }
++ }
++
++ @Override
++ public boolean skipField(int tag, CodedOutputStream output) throws IOException {
++ switch (WireFormat.getTagWireType(tag)) {
++ case WireFormat.WIRETYPE_VARINT:
++ {
++ long value = readInt64();
++ output.writeRawVarint32(tag);
++ output.writeUInt64NoTag(value);
++ return true;
++ }
++ case WireFormat.WIRETYPE_FIXED64:
++ {
++ long value = readRawLittleEndian64();
++ output.writeRawVarint32(tag);
++ output.writeFixed64NoTag(value);
++ return true;
++ }
++ case WireFormat.WIRETYPE_LENGTH_DELIMITED:
++ {
++ ByteString value = readBytes();
++ output.writeRawVarint32(tag);
++ output.writeBytesNoTag(value);
++ return true;
++ }
++ case WireFormat.WIRETYPE_START_GROUP:
++ {
++ output.writeRawVarint32(tag);
++ skipMessage(output);
++ int endtag =
++ WireFormat.makeTag(
++ WireFormat.getTagFieldNumber(tag), WireFormat.WIRETYPE_END_GROUP);
++ checkLastTagWas(endtag);
++ output.writeRawVarint32(endtag);
++ return true;
++ }
++ case WireFormat.WIRETYPE_END_GROUP:
++ {
++ return false;
++ }
++ case WireFormat.WIRETYPE_FIXED32:
++ {
++ int value = readRawLittleEndian32();
++ output.writeRawVarint32(tag);
++ output.writeFixed32NoTag(value);
++ return true;
++ }
++ default:
++ throw InvalidProtocolBufferException.invalidWireType();
++ }
++ }
++
++ @Override
++ public void skipMessage() throws IOException {
++ while (true) {
++ final int tag = readTag();
++ if (tag == 0 || !skipField(tag)) {
++ return;
++ }
++ }
++ }
++
++ @Override
++ public void skipMessage(CodedOutputStream output) throws IOException {
++ while (true) {
++ final int tag = readTag();
++ if (tag == 0 || !skipField(tag, output)) {
++ return;
++ }
++ }
++ }
++
++ public double readDouble() throws IOException {
++ return Double.longBitsToDouble(readRawLittleEndian64());
++ }
++
++ @Override
++ public float readFloat() throws IOException {
++ return Float.intBitsToFloat(readRawLittleEndian32());
++ }
++
++ @Override
++ public long readUInt64() throws IOException {
++ return readRawVarint64();
++ }
++
++ @Override
++ public long readInt64() throws IOException {
++ return readRawVarint64();
++ }
++
++ @Override
++ public int readInt32() throws IOException {
++ return readRawVarint32();
++ }
++
++ @Override
++ public long readFixed64() throws IOException {
++ return readRawLittleEndian64();
++ }
++
++ @Override
++ public int readFixed32() throws IOException {
++ return readRawLittleEndian32();
++ }
++
++ @Override
++ public boolean readBool() throws IOException {
++ return readRawVarint64() != 0;
++ }
++
++ @Override
++ public String readString() throws IOException {
++ final int size = readRawVarint32();
++ if (size > 0 && size <= remaining()) {
++ byte[] bytes = copyToArray(pos, size);
++ pos += size;
++ return new String(bytes, UTF_8);
++ }
++
++ if (size == 0) {
++ return "";
++ }
++ if (size < 0) {
++ throw InvalidProtocolBufferException.negativeSize();
++ }
++ throw InvalidProtocolBufferException.truncatedMessage();
++ }
++
++ @Override
++ public String readStringRequireUtf8() throws IOException {
++ final int size = readRawVarint32();
++ if (size > 0 && size <= remaining()) {
++ if (!Utf8.isValidUtf8(buffer, pos, pos + size)) {
++ throw InvalidProtocolBufferException.invalidUtf8();
++ }
++ byte[] bytes = copyToArray(pos, size);
++ pos += size;
++ return new String(bytes, UTF_8);
++ }
++
++ if (size == 0) {
++ return "";
++ }
++ if (size <= 0) {
++ throw InvalidProtocolBufferException.negativeSize();
++ }
++ throw InvalidProtocolBufferException.truncatedMessage();
++ }
++
++ @Override
++ public void readGroup(int fieldNumber, MessageLite.Builder builder,
++ ExtensionRegistryLite extensionRegistry) throws IOException {
++ if (recursionDepth >= recursionLimit) {
++ throw InvalidProtocolBufferException.recursionLimitExceeded();
++ }
++ ++recursionDepth;
++ builder.mergeFrom(this, extensionRegistry);
++ checkLastTagWas(WireFormat.makeTag(fieldNumber, WireFormat.WIRETYPE_END_GROUP));
++ --recursionDepth;
++ }
++
++ @Override
++ public <T extends MessageLite> T readGroup(int fieldNumber, Parser<T> parser,
++ ExtensionRegistryLite extensionRegistry) throws IOException {
++ if (recursionDepth >= recursionLimit) {
++ throw InvalidProtocolBufferException.recursionLimitExceeded();
++ }
++ ++recursionDepth;
++ T result = parser.parsePartialFrom(this, extensionRegistry);
++ checkLastTagWas(WireFormat.makeTag(fieldNumber, WireFormat.WIRETYPE_END_GROUP));
++ --recursionDepth;
++ return result;
++ }
++
++ @Deprecated
++ @Override
++ public void readUnknownGroup(int fieldNumber, MessageLite.Builder builder) throws IOException {
++ readGroup(fieldNumber, builder, ExtensionRegistryLite.getEmptyRegistry());
++ }
++
++ @Override
++ public void readMessage(MessageLite.Builder builder, ExtensionRegistryLite extensionRegistry)
++ throws IOException {
++ final int length = readRawVarint32();
++ if (recursionDepth >= recursionLimit) {
++ throw InvalidProtocolBufferException.recursionLimitExceeded();
++ }
++ final int oldLimit = pushLimit(length);
++ ++recursionDepth;
++ builder.mergeFrom(this, extensionRegistry);
++ checkLastTagWas(0);
++ --recursionDepth;
++ popLimit(oldLimit);
++ }
++
++ @Override
++ public <T extends MessageLite> T readMessage(Parser<T> parser,
++ ExtensionRegistryLite extensionRegistry) throws IOException {
++ int length = readRawVarint32();
++ if (recursionDepth >= recursionLimit) {
++ throw InvalidProtocolBufferException.recursionLimitExceeded();
++ }
++ final int oldLimit = pushLimit(length);
++ ++recursionDepth;
++ T result = parser.parsePartialFrom(this, extensionRegistry);
++ checkLastTagWas(0);
++ --recursionDepth;
++ popLimit(oldLimit);
++ return result;
++ }
++
++ @Override
++ public ByteString readBytes() throws IOException {
++ final int size = readRawVarint32();
++ if (size > 0 && size <= (limit - pos)) {
++ // Fast path: We already have the bytes in a contiguous buffer, so
++ // just copy directly from it.
++
++ final ByteString result =
++ immutable && enableAliasing
++ ? ByteString.wrap(buffer, pos, size)
++ : ByteString.wrap(copyToArray(pos, size));
++ pos += size;
++ return result;
++ }
++ if (size == 0) {
++ return ByteString.EMPTY;
++ }
++ // Slow path: Build a byte array first then copy it.
++ return ByteString.wrap(readRawBytes(size));
++ }
++
++ @Override
++ public byte[] readByteArray() throws IOException {
++ return readRawBytes(readRawVarint32());
++ }
++
++ @Override
++ public ByteBuffer readByteBuffer() throws IOException {
++ return ByteBuffer.wrap(readByteArray());
++ }
++
++ @Override
++ public int readUInt32() throws IOException {
++ return readRawVarint32();
++ }
++
++ @Override
++ public int readEnum() throws IOException {
++ return readRawVarint32();
++ }
++
++ @Override
++ public int readSFixed32() throws IOException {
++ return readRawLittleEndian32();
++ }
++
++ @Override
++ public long readSFixed64() throws IOException {
++ return readRawLittleEndian64();
++ }
++
++ @Override
++ public int readSInt32() throws IOException {
++ return decodeZigZag32(readRawVarint32());
++ }
++
++ @Override
++ public long readSInt64() throws IOException {
++ return decodeZigZag64(readRawVarint64());
++ }
++
++ @Override
++ public int readRawVarint32() throws IOException {
++ // See implementation notes for readRawVarint64
++ fastpath:
++ {
++ int tempPos = pos;
++
++ if (limit == tempPos) {
++ break fastpath;
++ }
++
++ int x;
++ if ((x = buffer.read(tempPos++)) >= 0) {
++ pos = tempPos;
++ return x;
++ } else if (limit - tempPos < 9) {
++ break fastpath;
++ } else if ((x ^= (buffer.read(tempPos++) << 7)) < 0) {
++ x ^= (~0 << 7);
++ } else if ((x ^= (buffer.read(tempPos++) << 14)) >= 0) {
++ x ^= (~0 << 7) ^ (~0 << 14);
++ } else if ((x ^= (buffer.read(tempPos++) << 21)) < 0) {
++ x ^= (~0 << 7) ^ (~0 << 14) ^ (~0 << 21);
++ } else {
++ int y = buffer.read(tempPos++);
++ x ^= y << 28;
++ x ^= (~0 << 7) ^ (~0 << 14) ^ (~0 << 21) ^ (~0 << 28);
++ if (y < 0
++ && buffer.read(tempPos++) < 0
++ && buffer.read(tempPos++) < 0
++ && buffer.read(tempPos++) < 0
++ && buffer.read(tempPos++) < 0
++ && buffer.read(tempPos++) < 0) {
++ break fastpath; // Will throw malformedVarint()
++ }
++ }
++ pos = tempPos;
++ return x;
++ }
++ return (int) readRawVarint64SlowPath();
++ }
++
++ @Override
++ public long readRawVarint64() throws IOException {
++ fastpath:
++ {
++ int tempPos = pos;
++
++ if (limit == tempPos) {
++ break fastpath;
++ }
++
++ long x;
++ int y;
++ if ((y = buffer.read(tempPos++)) >= 0) {
++ pos = tempPos;
++ return y;
++ } else if (limit - tempPos < 9) {
++ break fastpath;
++ } else if ((y ^= (buffer.read(tempPos++) << 7)) < 0) {
++ x = y ^ (~0 << 7);
++ } else if ((y ^= (buffer.read(tempPos++) << 14)) >= 0) {
++ x = y ^ ((~0 << 7) ^ (~0 << 14));
++ } else if ((y ^= (buffer.read(tempPos++) << 21)) < 0) {
++ x = y ^ ((~0 << 7) ^ (~0 << 14) ^ (~0 << 21));
++ } else if ((x = y ^ ((long) buffer.read(tempPos++) << 28)) >= 0L) {
++ x ^= (~0L << 7) ^ (~0L << 14) ^ (~0L << 21) ^ (~0L << 28);
++ } else if ((x ^= ((long) buffer.read(tempPos++) << 35)) < 0L) {
++ x ^= (~0L << 7) ^ (~0L << 14) ^ (~0L << 21) ^ (~0L << 28) ^ (~0L << 35);
++ } else if ((x ^= ((long) buffer.read(tempPos++) << 42)) >= 0L) {
++ x ^= (~0L << 7) ^ (~0L << 14) ^ (~0L << 21) ^ (~0L << 28) ^ (~0L << 35) ^ (~0L << 42);
++ } else if ((x ^= ((long) buffer.read(tempPos++) << 49)) < 0L) {
++ x ^=
++ (~0L << 7)
++ ^ (~0L << 14)
++ ^ (~0L << 21)
++ ^ (~0L << 28)
++ ^ (~0L << 35)
++ ^ (~0L << 42)
++ ^ (~0L << 49);
++ } else {
++ x ^= ((long) buffer.read(tempPos++) << 56);
++ x ^=
++ (~0L << 7)
++ ^ (~0L << 14)
++ ^ (~0L << 21)
++ ^ (~0L << 28)
++ ^ (~0L << 35)
++ ^ (~0L << 42)
++ ^ (~0L << 49)
++ ^ (~0L << 56);
++ if (x < 0L) {
++ if (buffer.read(tempPos++) < 0L) {
++ break fastpath; // Will throw malformedVarint()
++ }
++ }
++ }
++ pos = tempPos;
++ return x;
++ }
++ return readRawVarint64SlowPath();
++ }
++
++ @Override
++ long readRawVarint64SlowPath() throws IOException {
++ long result = 0;
++ for (int shift = 0; shift < 64; shift += 7) {
++ final byte b = readRawByte();
++ result |= (long) (b & 0x7F) << shift;
++ if ((b & 0x80) == 0) {
++ return result;
++ }
++ }
++ throw InvalidProtocolBufferException.malformedVarint();
++ }
++
++ @Override
++ public int readRawLittleEndian32() throws IOException {
++ int tempPos = pos;
++
++ if (limit - tempPos < FIXED_32_SIZE) {
++ throw InvalidProtocolBufferException.truncatedMessage();
++ }
++
++ pos = tempPos + FIXED_32_SIZE;
++ return (((buffer.read(tempPos) & 0xff))
++ | ((buffer.read(tempPos + 1) & 0xff) << 8)
++ | ((buffer.read(tempPos + 2) & 0xff) << 16)
++ | ((buffer.read(tempPos + 3) & 0xff) << 24));
++ }
++
++ @Override
++ public long readRawLittleEndian64() throws IOException {
++ int tempPos = pos;
++
++ if (limit - tempPos < FIXED_64_SIZE) {
++ throw InvalidProtocolBufferException.truncatedMessage();
++ }
++
++ pos = tempPos + FIXED_64_SIZE;
++ return (((buffer.read(tempPos) & 0xffL))
++ | ((buffer.read(tempPos + 1) & 0xffL) << 8)
++ | ((buffer.read(tempPos + 2) & 0xffL) << 16)
++ | ((buffer.read(tempPos + 3) & 0xffL) << 24)
++ | ((buffer.read(tempPos + 4) & 0xffL) << 32)
++ | ((buffer.read(tempPos + 5) & 0xffL) << 40)
++ | ((buffer.read(tempPos + 6) & 0xffL) << 48)
++ | ((buffer.read(tempPos + 7) & 0xffL) << 56));
++ }
++
++ @Override
++ public void enableAliasing(boolean enabled) {
++ this.enableAliasing = enabled;
++ }
++
++ @Override
++ public void resetSizeCounter() {
++ startPos = pos;
++ }
++
++ @Override
++ public int pushLimit(int byteLimit) throws InvalidProtocolBufferException {
++ if (byteLimit < 0) {
++ throw InvalidProtocolBufferException.negativeSize();
++ }
++ byteLimit += getTotalBytesRead();
++ final int oldLimit = currentLimit;
++ if (byteLimit > oldLimit) {
++ throw InvalidProtocolBufferException.truncatedMessage();
++ }
++ currentLimit = byteLimit;
++
++ recomputeBufferSizeAfterLimit();
++
++ return oldLimit;
++ }
++
++ @Override
++ public void popLimit(int oldLimit) {
++ currentLimit = oldLimit;
++ recomputeBufferSizeAfterLimit();
++ }
++
++ @Override
++ public int getBytesUntilLimit() {
++ if (currentLimit == Integer.MAX_VALUE) {
++ return -1;
++ }
++
++ return currentLimit - getTotalBytesRead();
++ }
++
++ @Override
++ public boolean isAtEnd() throws IOException {
++ return pos == limit;
++ }
++
++ @Override
++ public int getTotalBytesRead() {
++ return pos - startPos;
++ }
++
++ @Override
++ public byte readRawByte() throws IOException {
++ if (pos == limit) {
++ throw InvalidProtocolBufferException.truncatedMessage();
++ }
++ return buffer.read(pos++);
++ }
++
++ @Override
++ public byte[] readRawBytes(int length) throws IOException {
++ if (length > 0 && length <= (limit - pos)) {
++ byte[] bytes = copyToArray(pos, length);
++ pos += length;
++ return bytes;
++ }
++
++ if (length <= 0) {
++ if (length == 0) {
++ return Internal.EMPTY_BYTE_ARRAY;
++ } else {
++ throw InvalidProtocolBufferException.negativeSize();
++ }
++ }
++ throw InvalidProtocolBufferException.truncatedMessage();
++ }
++
++ @Override
++ public void skipRawBytes(int length) throws IOException {
++ if (length >= 0 && length <= (limit - pos)) {
++ // We have all the bytes we need already.
++ pos += length;
++ return;
++ }
++
++ if (length < 0) {
++ throw InvalidProtocolBufferException.negativeSize();
++ }
++ throw InvalidProtocolBufferException.truncatedMessage();
++ }
++
++ private void recomputeBufferSizeAfterLimit() {
++ limit += bufferSizeAfterLimit;
++ final int bufferEnd = limit - startPos;
++ if (bufferEnd > currentLimit) {
++ // Limit is in current buffer.
++ bufferSizeAfterLimit = bufferEnd - currentLimit;
++ limit -= bufferSizeAfterLimit;
++ } else {
++ bufferSizeAfterLimit = 0;
++ }
++ }
++
++ private int remaining() {
++ return (int) (limit - pos);
++ }
++
++ private byte[] copyToArray(int begin, int size) throws IOException {
++ try {
++ byte[] bytes = new byte[size];
++ buffer.read(begin, bytes);
++ return bytes;
++ } catch (IOException e) {
++ throw InvalidProtocolBufferException.truncatedMessage();
++ }
++ }
++
++ private void skipRawVarint() throws IOException {
++ if (limit - pos >= MAX_VARINT_SIZE) {
++ skipRawVarintFastPath();
++ } else {
++ skipRawVarintSlowPath();
++ }
++ }
++
++ private void skipRawVarintFastPath() throws IOException {
++ for (int i = 0; i < MAX_VARINT_SIZE; i++) {
++ if (buffer.read(pos++) >= 0) {
++ return;
++ }
++ }
++ throw InvalidProtocolBufferException.malformedVarint();
++ }
++
++ private void skipRawVarintSlowPath() throws IOException {
++ for (int i = 0; i < MAX_VARINT_SIZE; i++) {
++ if (readRawByte() >= 0) {
++ return;
++ }
++ }
++ throw InvalidProtocolBufferException.malformedVarint();
++ }
++ }
+ }
+diff --git a/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Utf8.java b/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Utf8.java
+index e6a497d..b84efd6 100644
+--- a/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Utf8.java
++++ b/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Utf8.java
+@@ -229,6 +229,16 @@ final class Utf8 {
+ }
+ }
+
++ private static int incompleteStateFor(ByteInput bytes, int index, int limit) {
++ int byte1 = bytes.read(index - 1);
++ switch (limit - index) {
++ case 0: return incompleteStateFor(byte1);
++ case 1: return incompleteStateFor(byte1, bytes.read(index));
++ case 2: return incompleteStateFor(byte1, bytes.read(index), bytes.read(index + 1));
++ default: throw new AssertionError();
++ }
++ }
++
+ // These UTF-8 handling methods are copied from Guava's Utf8 class with a modification to throw
+ // a protocol buffer local exception. This exception is then caught in CodedOutputStream so it can
+ // fallback to more lenient behavior.
+@@ -332,6 +342,24 @@ final class Utf8 {
+ }
+
+ /**
++ * Determines if the given {@link ByteInput} is a valid UTF-8 string.
++ *
++ * @param buffer the buffer to check.
++ */
++ static boolean isValidUtf8(ByteInput buffer, int index, int limit) {
++ return processor.isValidUtf8(buffer, index, limit);
++ }
++
++ /**
++ * Determines if the given {@link ByteInput} is a partially valid UTF-8 string.
++ *
++ * @param buffer the buffer to check.
++ */
++ static int partialIsValidUtf8(int state, ByteInput buffer, int index, int limit) {
++ return processor.partialIsValidUtf8(state, buffer, index, limit);
++ }
++
++ /**
+ * Encodes the given characters to the target {@link ByteBuffer} using UTF-8 encoding.
+ *
+ * <p>Selects an optimal algorithm based on the type of {@link ByteBuffer} (i.e. heap or direct)
+@@ -610,6 +638,169 @@ final class Utf8 {
+ }
+ }
+
++ public boolean isValidUtf8(ByteInput buffer, int index, int limit) {
++ return partialIsValidUtf8(COMPLETE, buffer, index, limit) == COMPLETE;
++ }
++
++ int partialIsValidUtf8(int state, ByteInput bytes, int index, int limit) {
++ if (state != COMPLETE) {
++ // The previous decoding operation was incomplete (or malformed).
++ // We look for a well-formed sequence consisting of bytes from
++ // the previous decoding operation (stored in state) together
++ // with bytes from the array slice.
++ //
++ // We expect such "straddler characters" to be rare.
++
++ if (index >= limit) { // No bytes? No progress.
++ return state;
++ }
++ int byte1 = (byte) state;
++ // byte1 is never ASCII.
++ if (byte1 < (byte) 0xE0) {
++ // two-byte form
++
++ // Simultaneously checks for illegal trailing-byte in
++ // leading position and overlong 2-byte form.
++ if (byte1 < (byte) 0xC2
++ // byte2 trailing-byte test
++ || bytes.read(index++) > (byte) 0xBF) {
++ return MALFORMED;
++ }
++ } else if (byte1 < (byte) 0xF0) {
++ // three-byte form
++
++ // Get byte2 from saved state or array
++ int byte2 = (byte) ~(state >> 8);
++ if (byte2 == 0) {
++ byte2 = bytes.read(index++);
++ if (index >= limit) {
++ return incompleteStateFor(byte1, byte2);
++ }
++ }
++ if (byte2 > (byte) 0xBF
++ // overlong? 5 most significant bits must not all be zero
++ || (byte1 == (byte) 0xE0 && byte2 < (byte) 0xA0)
++ // illegal surrogate codepoint?
++ || (byte1 == (byte) 0xED && byte2 >= (byte) 0xA0)
++ // byte3 trailing-byte test
++ || bytes.read(index++) > (byte) 0xBF) {
++ return MALFORMED;
++ }
++ } else {
++ // four-byte form
++
++ // Get byte2 and byte3 from saved state or array
++ int byte2 = (byte) ~(state >> 8);
++ int byte3 = 0;
++ if (byte2 == 0) {
++ byte2 = bytes.read(index++);
++ if (index >= limit) {
++ return incompleteStateFor(byte1, byte2);
++ }
++ } else {
++ byte3 = (byte) (state >> 16);
++ }
++ if (byte3 == 0) {
++ byte3 = bytes.read(index++);
++ if (index >= limit) {
++ return incompleteStateFor(byte1, byte2, byte3);
++ }
++ }
++
++ // If we were called with state == MALFORMED, then byte1 is 0xFF,
++ // which never occurs in well-formed UTF-8, and so we will return
++ // MALFORMED again below.
++
++ if (byte2 > (byte) 0xBF
++ // Check that 1 <= plane <= 16. Tricky optimized form of:
++ // if (byte1 > (byte) 0xF4 ||
++ // byte1 == (byte) 0xF0 && byte2 < (byte) 0x90 ||
++ // byte1 == (byte) 0xF4 && byte2 > (byte) 0x8F)
++ || (((byte1 << 28) + (byte2 - (byte) 0x90)) >> 30) != 0
++ // byte3 trailing-byte test
++ || byte3 > (byte) 0xBF
++ // byte4 trailing-byte test
++ || bytes.read(index++) > (byte) 0xBF) {
++ return MALFORMED;
++ }
++ }
++ }
++
++ return partialIsValidUtf8(bytes, index, limit);
++ }
++
++ private static int partialIsValidUtf8(ByteInput bytes, int index, int limit) {
++ // Optimize for 100% ASCII (Hotspot loves small simple top-level loops like this).
++ // This simple loop stops when we encounter a byte >= 0x80 (i.e. non-ASCII).
++ while (index < limit && bytes.read(index) >= 0) {
++ index++;
++ }
++
++ return (index >= limit) ? COMPLETE : partialIsValidUtf8NonAscii(bytes, index, limit);
++ }
++
++ private static int partialIsValidUtf8NonAscii(ByteInput bytes, int index, int limit) {
++ for (;;) {
++ int byte1, byte2;
++
++ // Optimize for interior runs of ASCII bytes.
++ do {
++ if (index >= limit) {
++ return COMPLETE;
++ }
++ } while ((byte1 = bytes.read(index++)) >= 0);
++
++ if (byte1 < (byte) 0xE0) {
++ // two-byte form
++
++ if (index >= limit) {
++ // Incomplete sequence
++ return byte1;
++ }
++
++ // Simultaneously checks for illegal trailing-byte in
++ // leading position and overlong 2-byte form.
++ if (byte1 < (byte) 0xC2
++ || bytes.read(index++) > (byte) 0xBF) {
++ return MALFORMED;
++ }
++ } else if (byte1 < (byte) 0xF0) {
++ // three-byte form
++
++ if (index >= limit - 1) { // incomplete sequence
++ return incompleteStateFor(bytes, index, limit);
++ }
++ if ((byte2 = bytes.read(index++)) > (byte) 0xBF
++ // overlong? 5 most significant bits must not all be zero
++ || (byte1 == (byte) 0xE0 && byte2 < (byte) 0xA0)
++ // check for illegal surrogate codepoints
++ || (byte1 == (byte) 0xED && byte2 >= (byte) 0xA0)
++ // byte3 trailing-byte test
++ || bytes.read(index++) > (byte) 0xBF) {
++ return MALFORMED;
++ }
++ } else {
++ // four-byte form
++
++ if (index >= limit - 2) { // incomplete sequence
++ return incompleteStateFor(bytes, index, limit);
++ }
++ if ((byte2 = bytes.read(index++)) > (byte) 0xBF
++ // Check that 1 <= plane <= 16. Tricky optimized form of:
++ // if (byte1 > (byte) 0xF4 ||
++ // byte1 == (byte) 0xF0 && byte2 < (byte) 0x90 ||
++ // byte1 == (byte) 0xF4 && byte2 > (byte) 0x8F)
++ || (((byte1 << 28) + (byte2 - (byte) 0x90)) >> 30) != 0
++ // byte3 trailing-byte test
++ || bytes.read(index++) > (byte) 0xBF
++ // byte4 trailing-byte test
++ || bytes.read(index++) > (byte) 0xBF) {
++ return MALFORMED;
++ }
++ }
++ }
++ }
++
+ /**
+ * Encodes an input character sequence ({@code in}) to UTF-8 in the target array ({@code out}).
+ * For a string, this method is similar to
http://git-wip-us.apache.org/repos/asf/hbase-thirdparty/blob/8b127cb7/hbase-shaded-protobuf/src/main/patches/HBASE-17087.patch
----------------------------------------------------------------------
diff --git a/hbase-shaded-protobuf/src/main/patches/HBASE-17087.patch b/hbase-shaded-protobuf/src/main/patches/HBASE-17087.patch
new file mode 100644
index 0000000..dc11cd8
--- /dev/null
+++ b/hbase-shaded-protobuf/src/main/patches/HBASE-17087.patch
@@ -0,0 +1,14 @@
+diff --git a/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ByteInputByteString.java b/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ByteInputByteString.java
+index 1949602..30de4ec 100644
+--- a/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ByteInputByteString.java
++++ b/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ByteInputByteString.java
+@@ -244,6 +244,8 @@ final class ByteInputByteString extends ByteString.LeafByteString {
+ public CodedInputStream newCodedInput() {
+ // We trust CodedInputStream not to modify the bytes, or to give anyone
+ // else access to them.
+- return CodedInputStream.newInstance(buffer, offset, length, true);
++ CodedInputStream cis = CodedInputStream.newInstance(buffer, offset, length, true);
++ cis.enableAliasing(true);
++ return cis;
+ }
+ }
http://git-wip-us.apache.org/repos/asf/hbase-thirdparty/blob/8b127cb7/hbase-shaded-protobuf/src/main/patches/HBASE-17239.patch
----------------------------------------------------------------------
diff --git a/hbase-shaded-protobuf/src/main/patches/HBASE-17239.patch b/hbase-shaded-protobuf/src/main/patches/HBASE-17239.patch
new file mode 100644
index 0000000..b60f67f
--- /dev/null
+++ b/hbase-shaded-protobuf/src/main/patches/HBASE-17239.patch
@@ -0,0 +1,44 @@
+ .../hbase/shaded/com/google/protobuf/CodedInputStream.java | 6 +-----
+ .../shaded/com/google/protobuf/UnsafeByteOperations.java | 11 +++++++++++
+ .../src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java | 7 ++++---
+ 3 files changed, 16 insertions(+), 8 deletions(-)
+
+diff --git a/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/CodedInputStream.java b/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/CodedInputStream.java
+index 0ad20e5..0bff626 100644
+--- a/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/CodedInputStream.java
++++ b/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/CodedInputStream.java
+@@ -150,11 +150,7 @@ public abstract class CodedInputStream {
+ }
+
+ /** Create a new CodedInputStream wrapping the given {@link ByteInput}. */
+- public static CodedInputStream newInstance(ByteInput buf, boolean bufferIsImmutable) {
+- return new ByteInputDecoder(buf, bufferIsImmutable);
+- }
+-
+- public static CodedInputStream newInstance(ByteInput buf, int off, int len, boolean bufferIsImmutable) {
++ static CodedInputStream newInstance(ByteInput buf, int off, int len, boolean bufferIsImmutable) {
+ return new ByteInputDecoder(buf, off, len, bufferIsImmutable);
+ }
+
+diff --git a/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UnsafeByteOperations.java b/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UnsafeByteOperations.java
+index 3d53f2e..ad99372 100644
+--- a/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UnsafeByteOperations.java
++++ b/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UnsafeByteOperations.java
+@@ -98,6 +98,17 @@ public final class UnsafeByteOperations {
+ }
+
+ /**
++ * An unsafe operation that returns a {@link ByteString} that is backed by the provided buffer.
++ * @param buffer the ByteInput buffer to be wrapped
++ * @param offset the offset of the wrapped ByteInput
++ * @param len the number of bytes of the ByteInput
++ * @return a {@link ByteString} backed by the provided buffer
++ */
++ public static ByteString unsafeWrap(ByteInput buffer, int offset, int len) {
++ return ByteString.wrap(buffer, offset, len);
++ }
++
++ /**
+ * Writes the given {@link ByteString} to the provided {@link ByteOutput}. Calling this method may
+ * result in multiple operations on the target {@link ByteOutput}
+ * (i.e. for roped {@link ByteString}s).
http://git-wip-us.apache.org/repos/asf/hbase-thirdparty/blob/8b127cb7/hbase-shaded-thirdparty/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-shaded-thirdparty/pom.xml b/hbase-shaded-thirdparty/pom.xml
new file mode 100644
index 0000000..5b0baef
--- /dev/null
+++ b/hbase-shaded-thirdparty/pom.xml
@@ -0,0 +1,120 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+ ON MVN COMPILE NOT WORKING
+
+ If you are wondering why 'mvn compile' does not work building HBase
+ (in particular, if you are doing it for the first time), instead do
+ 'mvn package'. If you are interested in the full story, see
+ https://issues.apache.org/jira/browse/HBASE-6795.
+
+-->
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.hbase.thirdparty</groupId>
+ <artifactId>hbase-thirdparty</artifactId>
+ <version>1.0.0-SNAPSHOT</version>
+ <relativePath>..</relativePath>
+ </parent>
+ <artifactId>hbase-shaded-thirdparty</artifactId>
+ <name>Apache HBase Relocated (Shaded) Third-party Libs</name>
+ <description>
+ Pulls down libs, relocates them and then makes a fat new jar with them all in it.
+ </description>
+ <build>
+ <plugins>
+ <plugin>
+ <artifactId>maven-clean-plugin</artifactId>
+ <configuration>
+ <filesets>
+ <fileset>
+ <directory>${basedir}</directory>
+ <includes>
+ <include>dependency-reduced-pom.xml</include>
+ </includes>
+ </fileset>
+ </filesets>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-shade-plugin</artifactId>
+ <version>3.0.0</version>
+ <executions>
+ <execution>
+ <phase>package</phase>
+ <goals>
+ <goal>shade</goal>
+ </goals>
+ <configuration>
+ <shadeSourcesContent>true</shadeSourcesContent>
+ <relocations>
+ <relocation>
+ <pattern>io.netty</pattern>
+ <shadedPattern>${rename.offset}.io.netty</shadedPattern>
+ </relocation>
+ <relocation>
+ <pattern>com.google</pattern>
+ <shadedPattern>${rename.offset}.com.google</shadedPattern>
+ </relocation>
+ </relocations>
+ <artifactSet>
+ <excludes>
+ <!--Exclude protobuf itself. We get a patched version in adjacent module.
+ -->
+ <exclude>com.google.protobuf:protobuf-java</exclude>
+ <exclude>com.google.code.findbugs:jsr305</exclude>
+ <exclude>com.google.errorprone:error_prone_annotations</exclude>
+ <exclude>com.google.j2objc:j2objc-annotations</exclude>
+ <exclude>org.codehaus.mojo:animal-sniffer-annotations</exclude>
+ <!-- NOTE: animal-sniffer-annotations was listed twice above; duplicate removed -->
+ </excludes>
+ </artifactSet>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+ <dependencies>
+ <dependency>
+ <groupId>com.google.code.gson</groupId>
+ <artifactId>gson</artifactId>
+ <version>2.8.1</version>
+ </dependency>
+ <dependency>
+ <groupId>com.google.guava</groupId>
+ <artifactId>guava</artifactId>
+ <version>22.0</version>
+ </dependency>
+ <dependency>
+ <groupId>com.google.protobuf</groupId>
+ <artifactId>protobuf-java-util</artifactId>
+ <version>${protobuf.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>io.netty</groupId>
+ <artifactId>netty-all</artifactId>
+ <version>4.1.12.Final</version>
+ </dependency>
+ </dependencies>
+</project>
http://git-wip-us.apache.org/repos/asf/hbase-thirdparty/blob/8b127cb7/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 7f71df4..7e46dc1 100644
--- a/pom.xml
+++ b/pom.xml
@@ -37,11 +37,12 @@
<!-- no parent resolution -->
</parent>
<groupId>org.apache.hbase.thirdparty</groupId>
- <artifactId>hbase-thirdparty-shaded</artifactId>
+ <artifactId>hbase-thirdparty</artifactId>
<version>1.0.0-SNAPSHOT</version>
- <name>Apache HBase Relocated Third-party Libraries</name>
+ <name>Apache HBase Third-Party Libs</name>
+ <packaging>pom</packaging>
<description>
- Hosts third-party, relocated (renamed, shaded) libraries.
+ Hosts relocated (renamed, shaded) third-party libraries used by hbase.
</description>
<url>http://hbase.apache.org</url>
<inceptionYear>2017</inceptionYear>
@@ -52,6 +53,10 @@
<distribution>repo</distribution>
</license>
</licenses>
+ <modules>
+ <module>hbase-shaded-protobuf</module>
+ <module>hbase-shaded-thirdparty</module>
+ </modules>
<scm>
<connection>scm:git:git://git.apache.org/hbase-thirdparty.git</connection>
<developerConnection>scm:git:https://git-wip-us.apache.org/repos/asf/hbase-thirdparty.git</developerConnection>
@@ -111,17 +116,6 @@
<developers/>
<!--TODO-->
<properties>
- <!-- override on command line to have generated LICENSE files include
- diagnostic info for verifying notice requirements -->
- <license.debug.print.included>false</license.debug.print.included>
- <!-- When a particular module bundles its depenendencies, should be true -->
- <license.bundles.dependencies>false</license.bundles.dependencies>
- <!-- modules that include a the logo in their source tree should set true -->
- <license.bundles.logo>false</license.bundles.logo>
- <!-- modules that include bootstrap in their source tree should set true -->
- <license.bundles.bootstrap>false</license.bundles.bootstrap>
- <!-- modules that include jquery in their source tree should set true -->
- <license.bundles.jquery>false</license.bundles.jquery>
<maven.build.timestamp.format>
yyyy-MM-dd'T'HH:mm
</maven.build.timestamp.format>
@@ -133,70 +127,104 @@
<rename.offset>org.apache.hadoop.hbase.shaded</rename.offset>
</properties>
<build>
- <plugins>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-remote-resources-plugin</artifactId>
- <version>1.5</version>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-release-plugin</artifactId>
- <configuration>
- <!--You need this profile. It'll sign your artifacts.
+ <pluginManagement>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-release-plugin</artifactId>
+ <configuration>
+ <!--You need this profile. It'll sign your artifacts.
I'm not sure if this config. actually works though.
I've been specifying -Papache-release on the command-line
-->
- <releaseProfiles>apache-release</releaseProfiles>
- <!--This stops our running tests for each stage of maven release.
+ <releaseProfiles>apache-release</releaseProfiles>
+ <!--This stops our running tests for each stage of maven release.
But it builds the test jar. From SUREFIRE-172.
-->
- <arguments>-Dmaven.test.skip.exec ${arguments}</arguments>
- <goals>${goals}</goals>
- <pomFileName>pom.xml</pomFileName>
- </configuration>
- </plugin>
- <plugin>
- <groupId>org.codehaus.mojo</groupId>
- <artifactId>build-helper-maven-plugin</artifactId>
- <version>1.9.1</version>
- </plugin>
+ <arguments>-Dmaven.test.skip.exec ${arguments}</arguments>
+ <goals>${goals}</goals>
+ <pomFileName>pom.xml</pomFileName>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>build-helper-maven-plugin</artifactId>
+ <version>1.9.1</version>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.rat</groupId>
+ <artifactId>apache-rat-plugin</artifactId>
+ <version>0.11</version>
+ <configuration>
+ <excludes>
+ <exclude>**/*.versionsBackup</exclude>
+ <exclude>**/*.log</exclude>
+ <exclude>**/.*</exclude>
+ <exclude>**/*.tgz</exclude>
+ <exclude>**/*.orig</exclude>
+ <exclude>**/.idea/**</exclude>
+ <exclude>**/*.iml</exclude>
+ <exclude>**/CHANGES.txt</exclude>
+ <exclude>**/generated/**</exclude>
+ <exclude>**/gen-*/**</exclude>
+ <exclude>**/*.avpr</exclude>
+ <exclude>**/*.svg</exclude>
+ <!-- vector graphics -->
+ <exclude>**/*.vm</exclude>
+ <!-- apache doxia generated -->
+ <exclude>**/control</exclude>
+ <exclude>**/conffile</exclude>
+ <!-- auto-gen docs -->
+ <exclude>docs/*</exclude>
+ <exclude>logs/*</exclude>
+ <!-- exclude source control files -->
+ <exclude>.git/**</exclude>
+ <exclude>.svn/**</exclude>
+ <exclude>**/.settings/**</exclude>
+ <exclude>**/patchprocess/**</exclude>
+ <exclude>**/dependency-reduced-pom.xml</exclude>
+ <exclude>**/rat.txt</exclude>
+ <!-- exclude the shaded protobuf files -->
+ <exclude>**/src/main/patches/**</exclude>
+ </excludes>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>buildnumber-maven-plugin</artifactId>
+ <version>1.4</version>
+ <executions>
+ <execution>
+ <phase>validate</phase>
+ <goals>
+ <goal>create-timestamp</goal>
+ </goals>
+ </execution>
+ </executions>
+ <configuration>
+ <timestampFormat>yyyy</timestampFormat>
+ <timestampPropertyName>build.year</timestampPropertyName>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-shade-plugin</artifactId>
+ <version>3.0.0</version>
+ </plugin>
+ </plugins>
+ </pluginManagement>
+ <plugins>
<plugin>
- <groupId>org.apache.rat</groupId>
- <artifactId>apache-rat-plugin</artifactId>
- <version>0.11</version>
+ <artifactId>maven-clean-plugin</artifactId>
<configuration>
- <excludes>
- <exclude>**/*.versionsBackup</exclude>
- <exclude>**/*.log</exclude>
- <exclude>**/.*</exclude>
- <exclude>**/*.tgz</exclude>
- <exclude>**/*.orig</exclude>
- <exclude>**/.idea/**</exclude>
- <exclude>**/*.iml</exclude>
- <exclude>**/CHANGES.txt</exclude>
- <exclude>**/generated/**</exclude>
- <exclude>**/gen-*/**</exclude>
- <exclude>**/*.avpr</exclude>
- <exclude>**/*.svg</exclude>
- <!-- vector graphics -->
- <exclude>**/*.vm</exclude>
- <!-- apache doxia generated -->
- <exclude>**/control</exclude>
- <exclude>**/conffile</exclude>
- <!-- auto-gen docs -->
- <exclude>docs/*</exclude>
- <exclude>logs/*</exclude>
- <!-- exclude source control files -->
- <exclude>.git/**</exclude>
- <exclude>.svn/**</exclude>
- <exclude>**/.settings/**</exclude>
- <exclude>**/patchprocess/**</exclude>
- <exclude>**/dependency-reduced-pom.xml</exclude>
- <exclude>**/rat.txt</exclude>
- <!-- exclude the shaded protobuf files -->
- <exclude>**/src/main/patches/**</exclude>
- </excludes>
+ <filesets>
+ <fileset>
+ <directory>${basedir}</directory>
+ <includes>
+ <include>dependency-reduced-pom.xml</include>
+ </includes>
+ </fileset>
+ </filesets>
</configuration>
</plugin>
<plugin>
@@ -233,9 +261,7 @@
<requireJavaVersion>
<version>[${java.min.version},)</version>
<message>Java is out of date.
- HBase requirs at least version ${java.min.version} of the JDK to properly build from source.
- You appear to be using an older version. You can use either "mvn -version" or
- "mvn enforcer:display-info" to verify what version is active.
+ HBase requires at least version ${java.min.version} of the JDK to properly build from source.
See the reference guide on building for more information: http://hbase.apache.org/book.html#build
</message>
</requireJavaVersion>
@@ -250,98 +276,9 @@
</execution>
</executions>
</plugin>
- <plugin>
- <groupId>org.codehaus.mojo</groupId>
- <artifactId>buildnumber-maven-plugin</artifactId>
- <version>1.4</version>
- <executions>
- <execution>
- <phase>validate</phase>
- <goals>
- <goal>create-timestamp</goal>
- </goals>
- </execution>
- </executions>
- <configuration>
- <timestampFormat>yyyy</timestampFormat>
- <timestampPropertyName>build.year</timestampPropertyName>
- </configuration>
- </plugin>
- <plugin>
- <artifactId>maven-clean-plugin</artifactId>
- <configuration>
- <filesets>
- <fileset>
- <directory>${basedir}</directory>
- <includes>
- <include>dependency-reduced-pom.xml</include>
- </includes>
- </fileset>
- </filesets>
- </configuration>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-shade-plugin</artifactId>
- <version>3.0.0</version>
- <executions>
- <execution>
- <phase>package</phase>
- <goals>
- <goal>shade</goal>
- </goals>
- <configuration>
- <shadeSourcesContent>true</shadeSourcesContent>
- <relocations>
- <relocation>
- <pattern>io.netty</pattern>
- <shadedPattern>${rename.offset}.io.netty</shadedPattern>
- </relocation>
- <relocation>
- <pattern>com.google</pattern>
- <shadedPattern>${rename.offset}.com.google</shadedPattern>
- </relocation>
- </relocations>
- <artifactSet>
- <excludes>
- <!--Exclude protobuf itself. We get a patched version later over in hbase core-->
- <exclude>com.google.protobuf:protobuf-java</exclude>
- <exclude>com.google.code.findbugs:jsr305</exclude>
- <exclude>com.google.errorprone:error_prone_annotations</exclude>
- <exclude>com.google.j2objc:j2objc-annotations</exclude>
- <exclude>org.codehaus.mojo:animal-sniffer-annotations</exclude>
- <exclude>org.codehaus.mojo:animal-sniffer-annotations</exclude>
- </excludes>
- </artifactSet>
- </configuration>
- </execution>
- </executions>
- </plugin>
</plugins>
</build>
<!--These are the third-party libs we want to relocate-->
- <dependencies>
- <dependency>
- <groupId>com.google.code.gson</groupId>
- <artifactId>gson</artifactId>
- <version>2.8.1</version>
- </dependency>
- <dependency>
- <groupId>com.google.guava</groupId>
- <artifactId>guava</artifactId>
- <version>22.0</version>
- </dependency>
- <dependency>
- <groupId>com.google.protobuf</groupId>
- <artifactId>protobuf-java-util</artifactId>
- <version>${protobuf.version}</version>
- </dependency>
- <dependency>
- <groupId>io.netty</groupId>
- <artifactId>netty-all</artifactId>
- <version>4.1.12.Final</version>
- </dependency>
- </dependencies>
<profiles>
<!-- this profile should be activated for release builds -->
<profile>