You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by st...@apache.org on 2016/09/30 21:57:17 UTC
[28/29] hbase git commit: HBASE-16741 Amend the generate protobufs
out-of-band build step to include shade,
pulling in protobuf source and a hook for patching protobuf
http://git-wip-us.apache.org/repos/asf/hbase/blob/32be831c/hbase-protocol-shaded/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/pom.xml b/hbase-protocol-shaded/pom.xml
index 1bd0aa6..e0e9eec 100644
--- a/hbase-protocol-shaded/pom.xml
+++ b/hbase-protocol-shaded/pom.xml
@@ -1,6 +1,6 @@
<?xml version="1.0"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <!--
+ <!--
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
@@ -19,112 +19,44 @@
* limitations under the License.
*/
-->
- <modelVersion>4.0.0</modelVersion>
- <parent>
- <artifactId>hbase</artifactId>
- <groupId>org.apache.hbase</groupId>
- <version>2.0.0-SNAPSHOT</version>
- <relativePath>..</relativePath>
- </parent>
-
- <artifactId>hbase-protocol-shaded</artifactId>
- <name>Apache HBase - Shaded Protocol</name>
- <description>Shaded protobuf protocol classes used by HBase internally.</description>
-
- <properties>
- <maven.javadoc.skip>true</maven.javadoc.skip>
- <!--Version of protobuf that hbase uses internally (we shade our pb)
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <artifactId>hbase</artifactId>
+ <groupId>org.apache.hbase</groupId>
+ <version>2.0.0-SNAPSHOT</version>
+ <relativePath>..</relativePath>
+ </parent>
+ <artifactId>hbase-protocol-shaded</artifactId>
+ <name>Apache HBase - Shaded Protocol</name>
+ <description>Shaded protobuf protocol classes used by HBase internally.</description>
+ <properties>
+ <maven.javadoc.skip>true</maven.javadoc.skip>
+ <!--Version of protobuf that hbase uses internally (we shade our pb)
-->
- <internal.protobuf.version>3.1.0</internal.protobuf.version>
- </properties>
-
- <build>
- <plugins>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-shade-plugin</artifactId>
- <version>2.4.3</version>
- <executions>
- <execution>
- <phase>package</phase>
- <goals>
- <goal>shade</goal>
- </goals>
- <configuration>
- <relocations>
- <relocation>
- <pattern>com.google.protobuf</pattern>
- <shadedPattern>org.apache.hadoop.hbase.shaded.com.google.protobuf</shadedPattern>
- </relocation>
- </relocations>
- <!-- What I got when I did a mvn dependency:list for this
- module. Exclude all but the protobuf
- [INFO] commons-logging:commons-logging:jar:1.2:compile
- [INFO] com.github.stephenc.findbugs:findbugs-annotations:jar:1.3.9-1:compile
- [INFO] log4j:log4j:jar:1.2.17:compile
- [INFO] com.google.protobuf:protobuf-java:jar:2.5.0:compile
- [INFO] org.hamcrest:hamcrest-core:jar:1.3:test
- [INFO] org.mockito:mockito-all:jar:1.10.8:test
- [INFO] junit:junit:jar:4.12:compile
- [INFO] org.apache.hbase:hbase-annotations:jar:2.0.0-SNAPSHOT:compile
-
- The list below must exclude all of the above except protobuf.
- -->
- <artifactSet>
- <excludes>
- <exclude>commons-logging:commons-logging</exclude>
- <exclude>com.github.stephenc.findbugs:findbugs-annotations</exclude>
- <exclude>log4j:log4j</exclude>
- <exclude>org.hamcrest:hamcrest-core</exclude>
- <exclude>org.mockito:mockito-all</exclude>
- <exclude>junit:junit</exclude>
- <exclude>org.apache.hbase:hbase-annotations</exclude>
- </excludes>
- </artifactSet>
- </configuration>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <!--Now unpack the shaded jar made above so the shaded classes
- are available to subsequent modules-->
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-dependency-plugin</artifactId>
- <version>2.10</version>
- <executions>
- <execution>
- <id>unpack</id>
- <phase>package</phase>
- <goals>
- <goal>unpack</goal>
- </goals>
- <configuration>
- <artifactItems>
- <artifactItem>
- <groupId>org.apache.hbase</groupId>
- <artifactId>hbase-protocol-shaded</artifactId>
- <version>${project.version}</version>
- <type>jar</type>
- <overWrite>true</overWrite>
- <outputDirectory>${project.build.directory}/classes</outputDirectory>
- </artifactItem>
- </artifactItems>
- </configuration>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-site-plugin</artifactId>
- <configuration>
- <skip>true</skip>
- </configuration>
- </plugin>
- <!-- Make a jar and put the sources in the jar -->
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-source-plugin</artifactId>
- </plugin>
+ <internal.protobuf.version>3.1.0</internal.protobuf.version>
+ <!--The Default target dir-->
+ <classes.dir>${basedir}/target</classes.dir>
+ <!--The Default location for sources-->
+ <sources.dir>src/main/java</sources.dir>
+ </properties>
+ <build>
+ <!--I want to override these in profile so define them
+ with variables up here-->
+ <sourceDirectory>${sources.dir}</sourceDirectory>
+ <outputDirectory>${classes.dir}</outputDirectory>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-site-plugin</artifactId>
+ <configuration>
+ <skip>true</skip>
+ </configuration>
+ </plugin>
+ <!-- Make a jar and put the sources in the jar -->
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-source-plugin</artifactId>
+ </plugin>
<plugin>
<!--Make it so assembly:single does nothing in here-->
<artifactId>maven-assembly-plugin</artifactId>
@@ -133,163 +65,283 @@
<skipAssembly>true</skipAssembly>
</configuration>
</plugin>
+ <plugin>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <!-- Always skip the second part executions, since we only run simple unit tests in this module -->
+ <executions>
+ <execution>
+ <id>secondPartTestsExecution</id>
+ <phase>test</phase>
+ <goals>
+ <goal>test</goal>
+ </goals>
+ <configuration>
+ <skip>true</skip>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ <pluginManagement>
+ <plugins>
+ <!--This plugin's configuration is used to store Eclipse m2e settings only. It has no influence on the Maven build itself.-->
<plugin>
- <artifactId>maven-surefire-plugin</artifactId>
- <!-- Always skip the second part executions, since we only run simple unit tests in this module -->
- <executions>
- <execution>
- <id>secondPartTestsExecution</id>
- <phase>test</phase>
+ <groupId>org.eclipse.m2e</groupId>
+ <artifactId>lifecycle-mapping</artifactId>
+ <version>1.0.0</version>
+ <configuration>
+ <lifecycleMappingMetadata>
+ <pluginExecutions>
+ <pluginExecution>
+ <pluginExecutionFilter>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-maven-plugins</artifactId>
+ <versionRange>[2.0.5-alpha,)</versionRange>
<goals>
- <goal>test</goal>
+ <goal>protoc</goal>
</goals>
- <configuration>
- <skip>true</skip>
- </configuration>
- </execution>
- </executions>
+ </pluginExecutionFilter>
+ <action>
+ <ignore/>
+ </action>
+ </pluginExecution>
+ </pluginExecutions>
+ </lifecycleMappingMetadata>
+ </configuration>
</plugin>
- </plugins>
- <pluginManagement>
+ </plugins>
+ </pluginManagement>
+ </build>
+ <dependencies>
+ <!--BE CAREFUL! Any dependency added here needs to be
+ excluded above in the shade plugin else the dependency
+ will get bundled-->
+ <!-- Intra-project dependencies -->
+ <dependency>
+ <groupId>org.apache.hbase</groupId>
+ <artifactId>hbase-annotations</artifactId>
+ <exclusions>
+ <exclusion>
+ <groupId>jdk.tools</groupId>
+ <artifactId>jdk.tools</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <!-- General dependencies -->
+ <dependency>
+ <groupId>com.google.protobuf</groupId>
+ <artifactId>protobuf-java</artifactId>
+ <version>${internal.protobuf.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-logging</groupId>
+ <artifactId>commons-logging</artifactId>
+ </dependency>
+ </dependencies>
+ <profiles>
+ <!-- Skip the tests in this module -->
+ <profile>
+ <id>skip-protocol-shaded-tests</id>
+ <activation>
+ <property>
+ <name>skip-protocol-shaded-tests</name>
+ </property>
+ </activation>
+ <properties>
+ <surefire.skipFirstPart>true</surefire.skipFirstPart>
+ </properties>
+ </profile>
+ <!--
+ Generate shaded classes using proto files and
+ the protobuf lib we depend on. Drops generated
+ files under src/main/java. Check in the generated
+ files so available at build time. Run this
+ profile/step every time you change proto
+ files or update the protobuf version. If you add a
+ proto, be sure to add it to the list below in the
+ hadoop-maven-plugin else we won't 'see' it.
+
+ The below first generates java files from protos.
+ We then compile the generated files and make a jar
+ file. The jar file is then passed to the shade plugin
+ which makes a new fat jar that includes the protobuf
+ lib but with all relocated given the
+ org.apache.hadoop.hbase.shaded prefix. The shading
+ step as by-product produces a jar with relocated
+ java source files in it. This jar we then unpack over
+ the src/main/java directory and we're done.
+
+ User is expected to check in the changes if they look
+ good.
+
+ TODO: Patch the protobuf lib using maven-patch-plugin
+ with changes we need.
+ -->
+ <profile>
+ <id>generate-shaded-classes</id>
+ <activation>
+ <property>
+ <name>generate-shaded-classes</name>
+ </property>
+ </activation>
+ <properties>
+ <profile.id>generate-shaded-classes</profile.id>
+ <sources.dir>${project.build.directory}/protoc-generated-sources</sources.dir>
+ <classes.dir>${project.build.directory}/protoc-generated-classes</classes.dir>
+ <!--When the compile for this profile runs, make sure it makes jars that
+ can be related back to this shading profile. Give them a shading prefix.
+ -->
+ <jar.finalName>${profile.id}.${artifactId}-${project.version}</jar.finalName>
+ </properties>
+ <build>
<plugins>
- <!--This plugin's configuration is used to store Eclipse m2e settings only. It has no influence on the Maven build itself.-->
<plugin>
- <groupId>org.eclipse.m2e</groupId>
- <artifactId>lifecycle-mapping</artifactId>
- <version>1.0.0</version>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-maven-plugins</artifactId>
<configuration>
- <lifecycleMappingMetadata>
- <pluginExecutions>
- <pluginExecution>
- <pluginExecutionFilter>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-maven-plugins</artifactId>
- <versionRange>[2.0.5-alpha,)</versionRange>
- <goals>
- <goal>protoc</goal>
- </goals>
- </pluginExecutionFilter>
- <action>
- <ignore></ignore>
- </action>
- </pluginExecution>
- </pluginExecutions>
- </lifecycleMappingMetadata>
+ <protocVersion>${internal.protobuf.version}</protocVersion>
</configuration>
+ <executions>
+ <execution>
+ <id>compile-protoc</id>
+ <phase>generate-sources</phase>
+ <goals>
+ <goal>protoc</goal>
+ </goals>
+ <configuration>
+ <imports>
+ <param>${basedir}/src/main/protobuf</param>
+ </imports>
+ <source>
+ <directory>${basedir}/src/main/protobuf</directory>
+ <!-- Unfortunately, Hadoop plugin does not support *.proto.
+ We have to individually list every proto file here -->
+ <includes>
+ <include>Admin.proto</include>
+ <include>Cell.proto</include>
+ <include>Client.proto</include>
+ <include>ClusterId.proto</include>
+ <include>ClusterStatus.proto</include>
+ <include>Comparator.proto</include>
+ <include>Encryption.proto</include>
+ <include>ErrorHandling.proto</include>
+ <include>FS.proto</include>
+ <include>Filter.proto</include>
+ <include>HBase.proto</include>
+ <include>HFile.proto</include>
+ <include>LoadBalancer.proto</include>
+ <include>MapReduce.proto</include>
+ <include>Master.proto</include>
+ <include>MasterProcedure.proto</include>
+ <include>Procedure.proto</include>
+ <include>Quota.proto</include>
+ <include>RPC.proto</include>
+ <include>RegionNormalizer.proto</include>
+ <include>RegionServerStatus.proto</include>
+ <include>Snapshot.proto</include>
+ <include>Tracing.proto</include>
+ <include>WAL.proto</include>
+ <include>ZooKeeper.proto</include>
+ <include>TestProcedure.proto</include>
+ <include>test.proto</include>
+ <include>test_rpc_service.proto</include>
+ </includes>
+ </source>
+ <output>${sources.dir}</output>
+ </configuration>
+ </execution>
+ </executions>
</plugin>
- </plugins>
- </pluginManagement>
- </build>
-
- <dependencies>
- <!--BE CAREFUL! Any dependency added here needs to be
- excluded above in the shade plugin else the dependency
- will get bundled here-->
- <!-- Intra-project dependencies -->
- <dependency>
- <groupId>org.apache.hbase</groupId>
- <artifactId>hbase-annotations</artifactId>
- <exclusions>
- <exclusion>
- <groupId>jdk.tools</groupId>
- <artifactId>jdk.tools</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
- <!-- General dependencies -->
- <dependency>
- <groupId>com.google.protobuf</groupId>
- <artifactId>protobuf-java</artifactId>
- <version>${internal.protobuf.version}</version>
- </dependency>
- <dependency>
- <groupId>commons-logging</groupId>
- <artifactId>commons-logging</artifactId>
- </dependency>
- </dependencies>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-jar-plugin</artifactId>
+ <version>3.0.2</version>
+ <configuration>
+ <finalName>${jar.finalName}</finalName>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-shade-plugin</artifactId>
+ <version>2.4.3</version>
+ <executions>
+ <execution>
+ <phase>package</phase>
+ <goals>
+ <goal>shade</goal>
+ </goals>
+ <configuration>
+ <shadeSourcesContent>true</shadeSourcesContent>
+ <createSourcesJar>true</createSourcesJar>
+ <relocations>
+ <relocation>
+ <pattern>com.google.protobuf</pattern>
+ <shadedPattern>org.apache.hadoop.hbase.shaded.com.google.protobuf</shadedPattern>
+ </relocation>
+ </relocations>
+ <!-- What I got when I did a mvn dependency:list for this
+ module. Exclude all but the protobuf
+ [INFO] commons-logging:commons-logging:jar:1.2:compile
+ [INFO] com.github.stephenc.findbugs:findbugs-annotations:jar:1.3.9-1:compile
+ [INFO] log4j:log4j:jar:1.2.17:compile
+ [INFO] com.google.protobuf:protobuf-java:jar:2.5.0:compile
+ [INFO] org.hamcrest:hamcrest-core:jar:1.3:test
+ [INFO] org.mockito:mockito-all:jar:1.10.8:test
+ [INFO] junit:junit:jar:4.12:compile
+ [INFO] org.apache.hbase:hbase-annotations:jar:2.0.0-SNAPSHOT:compile
- <profiles>
- <!-- Skip the tests in this module -->
- <profile>
- <id>skip-protocol-shaded-tests</id>
- <activation>
- <property>
- <name>skip-protocol-shaded-tests</name>
- </property>
- </activation>
- <properties>
- <surefire.skipFirstPart>true</surefire.skipFirstPart>
- </properties>
- </profile>
- <profile>
- <id>compile-protobuf</id>
- <activation>
- <property>
- <name>compile-protobuf</name>
- </property>
- </activation>
- <build>
- <plugins>
- <plugin>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-maven-plugins</artifactId>
+ The list below must exclude all of the above except protobuf.
+ -->
+ <artifactSet>
+ <excludes>
+ <exclude>commons-logging:commons-logging</exclude>
+ <exclude>com.github.stephenc.findbugs:findbugs-annotations</exclude>
+ <exclude>log4j:log4j</exclude>
+ <exclude>org.hamcrest:hamcrest-core</exclude>
+ <exclude>org.mockito:mockito-all</exclude>
+ <exclude>junit:junit</exclude>
+ <exclude>org.apache.hbase:hbase-annotations</exclude>
+ </excludes>
+ </artifactSet>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <!--Now unpack the shaded jar made above so the shaded classes
+ are available to subsequent modules-->
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-dependency-plugin</artifactId>
+ <version>2.10</version>
+ <executions>
+ <execution>
+ <id>unpack</id>
+ <phase>package</phase>
+ <goals>
+ <goal>unpack</goal>
+ </goals>
<configuration>
- <protocVersion>${internal.protobuf.version}</protocVersion>
+ <artifactItems>
+ <artifactItem>
+ <groupId>${project.groupId}</groupId>
+ <artifactId>${project.artifactId}</artifactId>
+ <version>${project.version}</version>
+ <classifier>sources</classifier>
+ <type>jar</type>
+ <overWrite>true</overWrite>
+ <outputDirectory>${default.sources.dir}</outputDirectory>
+ <includes>**/*.java</includes>
+ </artifactItem>
+ </artifactItems>
</configuration>
- <executions>
- <execution>
- <id>compile-protoc</id>
- <phase>generate-sources</phase>
- <goals>
- <goal>protoc</goal>
- </goals>
- <configuration>
- <imports>
- <param>${basedir}/src/main/protobuf</param>
- </imports>
- <source>
- <directory>${basedir}/src/main/protobuf</directory>
- <!-- Unfortunately, Hadoop plugin does not support *.proto.
- We have to individually list every proto file here -->
- <includes>
- <include>Admin.proto</include>
- <include>Cell.proto</include>
- <include>Client.proto</include>
- <include>ClusterId.proto</include>
- <include>ClusterStatus.proto</include>
- <include>Comparator.proto</include>
- <include>Encryption.proto</include>
- <include>ErrorHandling.proto</include>
- <include>FS.proto</include>
- <include>Filter.proto</include>
- <include>HBase.proto</include>
- <include>HFile.proto</include>
- <include>LoadBalancer.proto</include>
- <include>MapReduce.proto</include>
- <include>Master.proto</include>
- <include>MasterProcedure.proto</include>
- <include>Procedure.proto</include>
- <include>Quota.proto</include>
- <include>RPC.proto</include>
- <include>RegionNormalizer.proto</include>
- <include>RegionServerStatus.proto</include>
- <include>Snapshot.proto</include>
- <include>Tracing.proto</include>
- <include>WAL.proto</include>
- <include>ZooKeeper.proto</include>
- <include>TestProcedure.proto</include>
- <include>test.proto</include>
- <include>test_rpc_service.proto</include>
- </includes>
- </source>
- <!--<output>${project.build.directory}/generated-sources/java</output>-->
- <output>${basedir}/src/main/java/</output>
- </configuration>
- </execution>
- </executions>
- </plugin>
- </plugins>
- </build>
- </profile>
- </profiles>
+ </execution>
+ </executions>
+ </plugin>
+ <!--Patch the files here!!!
+ Use maven-patch-plugin
+ -->
+ </plugins>
+ </build>
+ </profile>
+ </profiles>
</project>
http://git-wip-us.apache.org/repos/asf/hbase/blob/32be831c/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/ipc/protobuf/generated/TestProcedureProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/ipc/protobuf/generated/TestProcedureProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/ipc/protobuf/generated/TestProcedureProtos.java
index 641bde3..2e78905 100644
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/ipc/protobuf/generated/TestProcedureProtos.java
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/ipc/protobuf/generated/TestProcedureProtos.java
@@ -6,17 +6,17 @@ package org.apache.hadoop.hbase.shaded.ipc.protobuf.generated;
public final class TestProcedureProtos {
private TestProcedureProtos() {}
public static void registerAllExtensions(
- com.google.protobuf.ExtensionRegistryLite registry) {
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite registry) {
}
public static void registerAllExtensions(
- com.google.protobuf.ExtensionRegistry registry) {
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry registry) {
registerAllExtensions(
- (com.google.protobuf.ExtensionRegistryLite) registry);
+ (org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite) registry);
}
public interface TestTableDDLStateDataOrBuilder extends
// @@protoc_insertion_point(interface_extends:TestTableDDLStateData)
- com.google.protobuf.MessageOrBuilder {
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
/**
* <code>required string table_name = 1;</code>
@@ -29,18 +29,18 @@ public final class TestProcedureProtos {
/**
* <code>required string table_name = 1;</code>
*/
- com.google.protobuf.ByteString
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
getTableNameBytes();
}
/**
* Protobuf type {@code TestTableDDLStateData}
*/
public static final class TestTableDDLStateData extends
- com.google.protobuf.GeneratedMessageV3 implements
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:TestTableDDLStateData)
TestTableDDLStateDataOrBuilder {
// Use TestTableDDLStateData.newBuilder() to construct.
- private TestTableDDLStateData(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+ private TestTableDDLStateData(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private TestTableDDLStateData() {
@@ -48,18 +48,18 @@ public final class TestProcedureProtos {
}
@java.lang.Override
- public final com.google.protobuf.UnknownFieldSet
+ public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private TestTableDDLStateData(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
this();
int mutable_bitField0_ = 0;
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder();
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
@@ -76,29 +76,29 @@ public final class TestProcedureProtos {
break;
}
case 10: {
- com.google.protobuf.ByteString bs = input.readBytes();
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00000001;
tableName_ = bs;
break;
}
}
}
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
- throw new com.google.protobuf.InvalidProtocolBufferException(
+ throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
- public static final com.google.protobuf.Descriptors.Descriptor
+ public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.internal_static_TestTableDDLStateData_descriptor;
}
- protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.internal_static_TestTableDDLStateData_fieldAccessorTable
.ensureFieldAccessorsInitialized(
@@ -122,8 +122,8 @@ public final class TestProcedureProtos {
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
- com.google.protobuf.ByteString bs =
- (com.google.protobuf.ByteString) ref;
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
+ (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
tableName_ = s;
@@ -134,17 +134,17 @@ public final class TestProcedureProtos {
/**
* <code>required string table_name = 1;</code>
*/
- public com.google.protobuf.ByteString
+ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
getTableNameBytes() {
java.lang.Object ref = tableName_;
if (ref instanceof java.lang.String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
tableName_ = b;
return b;
} else {
- return (com.google.protobuf.ByteString) ref;
+ return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
@@ -162,10 +162,10 @@ public final class TestProcedureProtos {
return true;
}
- public void writeTo(com.google.protobuf.CodedOutputStream output)
+ public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) == 0x00000001)) {
- com.google.protobuf.GeneratedMessageV3.writeString(output, 1, tableName_);
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, tableName_);
}
unknownFields.writeTo(output);
}
@@ -176,7 +176,7 @@ public final class TestProcedureProtos {
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
- size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, tableName_);
+ size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(1, tableName_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
@@ -221,61 +221,61 @@ public final class TestProcedureProtos {
}
public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom(
- com.google.protobuf.ByteString data)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
+ throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom(
- com.google.protobuf.ByteString data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom(byte[] data)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom(
byte[] data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom(java.io.InputStream input)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
+ return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom(
java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
+ return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
+ return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseDelimitedFrom(
java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
+ return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom(
- com.google.protobuf.CodedInputStream input)
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
+ return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
- return com.google.protobuf.GeneratedMessageV3
+ return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@@ -293,7 +293,7 @@ public final class TestProcedureProtos {
@java.lang.Override
protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
@@ -301,15 +301,15 @@ public final class TestProcedureProtos {
* Protobuf type {@code TestTableDDLStateData}
*/
public static final class Builder extends
- com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:TestTableDDLStateData)
org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateDataOrBuilder {
- public static final com.google.protobuf.Descriptors.Descriptor
+ public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.internal_static_TestTableDDLStateData_descriptor;
}
- protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.internal_static_TestTableDDLStateData_fieldAccessorTable
.ensureFieldAccessorsInitialized(
@@ -322,12 +322,12 @@ public final class TestProcedureProtos {
}
private Builder(
- com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessageV3
+ if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@@ -338,7 +338,7 @@ public final class TestProcedureProtos {
return this;
}
- public com.google.protobuf.Descriptors.Descriptor
+ public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.internal_static_TestTableDDLStateData_descriptor;
}
@@ -372,29 +372,29 @@ public final class TestProcedureProtos {
return (Builder) super.clone();
}
public Builder setField(
- com.google.protobuf.Descriptors.FieldDescriptor field,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
- com.google.protobuf.Descriptors.FieldDescriptor field) {
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
- com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
- com.google.protobuf.Descriptors.FieldDescriptor field,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
int index, Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
- com.google.protobuf.Descriptors.FieldDescriptor field,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.addRepeatedField(field, value);
}
- public Builder mergeFrom(com.google.protobuf.Message other) {
+ public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData) {
return mergeFrom((org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData)other);
} else {
@@ -423,13 +423,13 @@ public final class TestProcedureProtos {
}
public Builder mergeFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
@@ -454,8 +454,8 @@ public final class TestProcedureProtos {
public java.lang.String getTableName() {
java.lang.Object ref = tableName_;
if (!(ref instanceof java.lang.String)) {
- com.google.protobuf.ByteString bs =
- (com.google.protobuf.ByteString) ref;
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
+ (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
tableName_ = s;
@@ -468,17 +468,17 @@ public final class TestProcedureProtos {
/**
* <code>required string table_name = 1;</code>
*/
- public com.google.protobuf.ByteString
+ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
getTableNameBytes() {
java.lang.Object ref = tableName_;
if (ref instanceof String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
tableName_ = b;
return b;
} else {
- return (com.google.protobuf.ByteString) ref;
+ return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
}
}
/**
@@ -507,7 +507,7 @@ public final class TestProcedureProtos {
* <code>required string table_name = 1;</code>
*/
public Builder setTableNameBytes(
- com.google.protobuf.ByteString value) {
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
@@ -517,12 +517,12 @@ public final class TestProcedureProtos {
return this;
}
public final Builder setUnknownFields(
- final com.google.protobuf.UnknownFieldSet unknownFields) {
+ final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
public final Builder mergeUnknownFields(
- final com.google.protobuf.UnknownFieldSet unknownFields) {
+ final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
@@ -540,22 +540,22 @@ public final class TestProcedureProtos {
return DEFAULT_INSTANCE;
}
- @java.lang.Deprecated public static final com.google.protobuf.Parser<TestTableDDLStateData>
- PARSER = new com.google.protobuf.AbstractParser<TestTableDDLStateData>() {
+ @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<TestTableDDLStateData>
+ PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<TestTableDDLStateData>() {
public TestTableDDLStateData parsePartialFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
return new TestTableDDLStateData(input, extensionRegistry);
}
};
- public static com.google.protobuf.Parser<TestTableDDLStateData> parser() {
+ public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<TestTableDDLStateData> parser() {
return PARSER;
}
@java.lang.Override
- public com.google.protobuf.Parser<TestTableDDLStateData> getParserForType() {
+ public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<TestTableDDLStateData> getParserForType() {
return PARSER;
}
@@ -565,17 +565,17 @@ public final class TestProcedureProtos {
}
- private static final com.google.protobuf.Descriptors.Descriptor
+ private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
internal_static_TestTableDDLStateData_descriptor;
private static final
- com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_TestTableDDLStateData_fieldAccessorTable;
- public static com.google.protobuf.Descriptors.FileDescriptor
+ public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
- private static com.google.protobuf.Descriptors.FileDescriptor
+ private static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
java.lang.String[] descriptorData = {
@@ -584,22 +584,22 @@ public final class TestProcedureProtos {
".hadoop.hbase.shaded.ipc.protobuf.genera" +
"tedB\023TestProcedureProtos\210\001\001"
};
- com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
- new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() {
- public com.google.protobuf.ExtensionRegistry assignDescriptors(
- com.google.protobuf.Descriptors.FileDescriptor root) {
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+ new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() {
+ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry assignDescriptors(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
return null;
}
};
- com.google.protobuf.Descriptors.FileDescriptor
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
- new com.google.protobuf.Descriptors.FileDescriptor[] {
+ new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor[] {
}, assigner);
internal_static_TestTableDDLStateData_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_TestTableDDLStateData_fieldAccessorTable = new
- com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_TestTableDDLStateData_descriptor,
new java.lang.String[] { "TableName", });
}