Posted to commits@hive.apache.org by an...@apache.org on 2019/03/15 08:29:41 UTC
[hive] branch master updated: HIVE-21362: Add an input format and serde to read from protobuf files. (Harish JP, reviewed by Jason Dere)
This is an automated email from the ASF dual-hosted git repository.
anishek pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git
The following commit(s) were added to refs/heads/master by this push:
new bc79adf HIVE-21362: Add an input format and serde to read from protobuf files. (Harish JP, reviewed by Jason Dere)
bc79adf is described below
commit bc79adfaa7ee26e489a139e822616967a39396ba
Author: Anishek Agarwal <an...@gmail.com>
AuthorDate: Fri Mar 15 13:59:27 2019 +0530
HIVE-21362: Add an input format and serde to read from protobuf files. (Harish JP, reviewed by Jason Dere)
---
contrib/pom.xml | 60 +
.../hadoop/hive/contrib/serde2/SampleProtos.java | 4621 ++++++++++++++++++++
.../contrib/input/ProtobufMessageInputFormat.java | 128 +
.../hadoop/hive/contrib/input}/package-info.java | 4 +-
.../contrib/serde2/ProtobufBytesWritableSerDe.java | 62 +
.../hive/contrib/serde2/ProtobufMessageSerDe.java | 18 +-
.../hadoop/hive/contrib/serde2/ProtobufSerDe.java | 377 ++
contrib/src/protobuf-test/SampleProtos.proto | 57 +
.../hive/contrib/serde2/TestProtoMessageSerDe.java | 247 ++
.../TestCachedStoreUpdateUsingEvents.java | 0
.../metastore/tools/metatool/package-info.java | 2 +-
11 files changed, 5571 insertions(+), 5 deletions(-)
diff --git a/contrib/pom.xml b/contrib/pom.xml
index df5e5d7..d569645 100644
--- a/contrib/pom.xml
+++ b/contrib/pom.xml
@@ -49,6 +49,11 @@
<artifactId>hive-shims</artifactId>
<version>${project.version}</version>
</dependency>
+ <dependency>
+ <groupId>com.google.protobuf</groupId>
+ <artifactId>protobuf-java</artifactId>
+ <version>${protobuf.version}</version>
+ </dependency>
<!-- inter-project -->
<dependency>
<groupId>commons-codec</groupId>
@@ -80,6 +85,61 @@
<build>
<sourceDirectory>${basedir}/src/java</sourceDirectory>
<testSourceDirectory>${basedir}/src/test</testSourceDirectory>
+ <plugins>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>build-helper-maven-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>add-test-sources</id>
+ <phase>generate-test-sources</phase>
+ <goals>
+ <goal>add-test-source</goal>
+ </goals>
+ <configuration>
+ <sources>
+ <source>src/gen-test/protobuf/gen-java</source>
+ </sources>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
</build>
+ <profiles>
+ <profile>
+ <id>protobuf</id>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-antrun-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>generate-protobuf-test-sources</id>
+ <phase>generate-test-sources</phase>
+ <configuration>
+ <target>
+ <property name="protobuf.src.dir" location="${basedir}/src/protobuf-test"/>
+ <property name="protobuf.build.dir" location="${basedir}/src/gen-test/protobuf/gen-java"/>
+ <echo>Building contrib Protobuf</echo>
+ <mkdir dir="${protobuf.build.dir}"/>
+ <exec executable="protoc" failonerror="true">
+ <arg value="--java_out=${protobuf.build.dir}"/>
+ <arg value="-I=${protobuf.src.dir}"/>
+ <arg value="${protobuf.src.dir}/SampleProtos.proto"/>
+ </exec>
+ </target>
+ </configuration>
+ <goals>
+ <goal>run</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+ </profile>
+ </profiles>
</project>
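The optional protobuf profile above hooks protoc into the generate-test-sources phase; going by the ant target it is equivalent to running protoc --java_out=src/gen-test/protobuf/gen-java -I=src/protobuf-test src/protobuf-test/SampleProtos.proto from the contrib module, so the generated SampleProtos.java below stays checked in and the profile is only needed when SampleProtos.proto changes. The build-helper-maven-plugin execution then registers that generated directory as a test-source root for the regular build.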
diff --git a/contrib/src/gen-test/protobuf/gen-java/org/apache/hadoop/hive/contrib/serde2/SampleProtos.java b/contrib/src/gen-test/protobuf/gen-java/org/apache/hadoop/hive/contrib/serde2/SampleProtos.java
new file mode 100644
index 0000000..8c20e22
--- /dev/null
+++ b/contrib/src/gen-test/protobuf/gen-java/org/apache/hadoop/hive/contrib/serde2/SampleProtos.java
@@ -0,0 +1,4621 @@
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// source: SampleProtos.proto
+
+package org.apache.hadoop.hive.contrib.serde2;
+
+public final class SampleProtos {
+ private SampleProtos() {}
+ public static void registerAllExtensions(
+ com.google.protobuf.ExtensionRegistry registry) {
+ }
+ public interface MapFieldEntryOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // optional string key = 1;
+ /**
+ * <code>optional string key = 1;</code>
+ */
+ boolean hasKey();
+ /**
+ * <code>optional string key = 1;</code>
+ */
+ java.lang.String getKey();
+ /**
+ * <code>optional string key = 1;</code>
+ */
+ com.google.protobuf.ByteString
+ getKeyBytes();
+
+ // optional string value = 2;
+ /**
+ * <code>optional string value = 2;</code>
+ */
+ boolean hasValue();
+ /**
+ * <code>optional string value = 2;</code>
+ */
+ java.lang.String getValue();
+ /**
+ * <code>optional string value = 2;</code>
+ */
+ com.google.protobuf.ByteString
+ getValueBytes();
+ }
+ /**
+ * Protobuf type {@code MapFieldEntry}
+ */
+ public static final class MapFieldEntry extends
+ com.google.protobuf.GeneratedMessage
+ implements MapFieldEntryOrBuilder {
+ // Use MapFieldEntry.newBuilder() to construct.
+ private MapFieldEntry(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private MapFieldEntry(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final MapFieldEntry defaultInstance;
+ public static MapFieldEntry getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public MapFieldEntry getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private MapFieldEntry(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ key_ = input.readBytes();
+ break;
+ }
+ case 18: {
+ bitField0_ |= 0x00000002;
+ value_ = input.readBytes();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_MapFieldEntry_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_MapFieldEntry_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.class, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<MapFieldEntry> PARSER =
+ new com.google.protobuf.AbstractParser<MapFieldEntry>() {
+ public MapFieldEntry parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new MapFieldEntry(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<MapFieldEntry> getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // optional string key = 1;
+ public static final int KEY_FIELD_NUMBER = 1;
+ private java.lang.Object key_;
+ /**
+ * <code>optional string key = 1;</code>
+ */
+ public boolean hasKey() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>optional string key = 1;</code>
+ */
+ public java.lang.String getKey() {
+ java.lang.Object ref = key_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ key_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ * <code>optional string key = 1;</code>
+ */
+ public com.google.protobuf.ByteString
+ getKeyBytes() {
+ java.lang.Object ref = key_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ key_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ // optional string value = 2;
+ public static final int VALUE_FIELD_NUMBER = 2;
+ private java.lang.Object value_;
+ /**
+ * <code>optional string value = 2;</code>
+ */
+ public boolean hasValue() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * <code>optional string value = 2;</code>
+ */
+ public java.lang.String getValue() {
+ java.lang.Object ref = value_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ value_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ * <code>optional string value = 2;</code>
+ */
+ public com.google.protobuf.ByteString
+ getValueBytes() {
+ java.lang.Object ref = value_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ value_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ private void initFields() {
+ key_ = "";
+ value_ = "";
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, getKeyBytes());
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeBytes(2, getValueBytes());
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, getKeyBytes());
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(2, getValueBytes());
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code MapFieldEntry}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_MapFieldEntry_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_MapFieldEntry_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.class, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ key_ = "";
+ bitField0_ = (bitField0_ & ~0x00000001);
+ value_ = "";
+ bitField0_ = (bitField0_ & ~0x00000002);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_MapFieldEntry_descriptor;
+ }
+
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry getDefaultInstanceForType() {
+ return org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry build() {
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry buildPartial() {
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry result = new org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.key_ = key_;
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.value_ = value_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry) {
+ return mergeFrom((org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry other) {
+ if (other == org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.getDefaultInstance()) return this;
+ if (other.hasKey()) {
+ bitField0_ |= 0x00000001;
+ key_ = other.key_;
+ onChanged();
+ }
+ if (other.hasValue()) {
+ bitField0_ |= 0x00000002;
+ value_ = other.value_;
+ onChanged();
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // optional string key = 1;
+ private java.lang.Object key_ = "";
+ /**
+ * <code>optional string key = 1;</code>
+ */
+ public boolean hasKey() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>optional string key = 1;</code>
+ */
+ public java.lang.String getKey() {
+ java.lang.Object ref = key_;
+ if (!(ref instanceof java.lang.String)) {
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ key_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * <code>optional string key = 1;</code>
+ */
+ public com.google.protobuf.ByteString
+ getKeyBytes() {
+ java.lang.Object ref = key_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ key_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * <code>optional string key = 1;</code>
+ */
+ public Builder setKey(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ key_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional string key = 1;</code>
+ */
+ public Builder clearKey() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ key_ = getDefaultInstance().getKey();
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional string key = 1;</code>
+ */
+ public Builder setKeyBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ key_ = value;
+ onChanged();
+ return this;
+ }
+
+ // optional string value = 2;
+ private java.lang.Object value_ = "";
+ /**
+ * <code>optional string value = 2;</code>
+ */
+ public boolean hasValue() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * <code>optional string value = 2;</code>
+ */
+ public java.lang.String getValue() {
+ java.lang.Object ref = value_;
+ if (!(ref instanceof java.lang.String)) {
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ value_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * <code>optional string value = 2;</code>
+ */
+ public com.google.protobuf.ByteString
+ getValueBytes() {
+ java.lang.Object ref = value_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ value_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * <code>optional string value = 2;</code>
+ */
+ public Builder setValue(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000002;
+ value_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional string value = 2;</code>
+ */
+ public Builder clearValue() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ value_ = getDefaultInstance().getValue();
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional string value = 2;</code>
+ */
+ public Builder setValueBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000002;
+ value_ = value;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:MapFieldEntry)
+ }
+
+ static {
+ defaultInstance = new MapFieldEntry(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:MapFieldEntry)
+ }
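For reference, a minimal sketch (not part of this patch) of exercising the generated MapFieldEntry API shown above; it assumes protobuf-java 2.x on the classpath, matching the GeneratedMessage base class used here:

import org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry;

public class MapFieldEntryRoundTrip {
  public static void main(String[] args) throws Exception {
    // Build an entry through the generated Builder (setKey/setValue set bits 0x1/0x2).
    MapFieldEntry entry = MapFieldEntry.newBuilder()
        .setKey("color")
        .setValue("blue")
        .build();
    // Serialize (writeTo/getSerializedSize via toByteArray), then re-parse through PARSER.
    byte[] wire = entry.toByteArray();
    MapFieldEntry parsed = MapFieldEntry.parseFrom(wire);
    System.out.println(parsed.hasKey() + " " + parsed.getKey());     // true color
    System.out.println(parsed.hasValue() + " " + parsed.getValue()); // true blue
  }
}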
+
+ public interface Mesg1OrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // repeated .MapFieldEntry anotherMap = 1;
+ /**
+ * <code>repeated .MapFieldEntry anotherMap = 1;</code>
+ */
+ java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry>
+ getAnotherMapList();
+ /**
+ * <code>repeated .MapFieldEntry anotherMap = 1;</code>
+ */
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry getAnotherMap(int index);
+ /**
+ * <code>repeated .MapFieldEntry anotherMap = 1;</code>
+ */
+ int getAnotherMapCount();
+ /**
+ * <code>repeated .MapFieldEntry anotherMap = 1;</code>
+ */
+ java.util.List<? extends org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder>
+ getAnotherMapOrBuilderList();
+ /**
+ * <code>repeated .MapFieldEntry anotherMap = 1;</code>
+ */
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder getAnotherMapOrBuilder(
+ int index);
+
+ // optional .MapFieldEntry noMap = 2;
+ /**
+ * <code>optional .MapFieldEntry noMap = 2;</code>
+ */
+ boolean hasNoMap();
+ /**
+ * <code>optional .MapFieldEntry noMap = 2;</code>
+ */
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry getNoMap();
+ /**
+ * <code>optional .MapFieldEntry noMap = 2;</code>
+ */
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder getNoMapOrBuilder();
+
+ // repeated int32 intList = 3;
+ /**
+ * <code>repeated int32 intList = 3;</code>
+ */
+ java.util.List<java.lang.Integer> getIntListList();
+ /**
+ * <code>repeated int32 intList = 3;</code>
+ */
+ int getIntListCount();
+ /**
+ * <code>repeated int32 intList = 3;</code>
+ */
+ int getIntList(int index);
+ }
+ /**
+ * Protobuf type {@code Mesg1}
+ */
+ public static final class Mesg1 extends
+ com.google.protobuf.GeneratedMessage
+ implements Mesg1OrBuilder {
+ // Use Mesg1.newBuilder() to construct.
+ private Mesg1(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private Mesg1(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final Mesg1 defaultInstance;
+ public static Mesg1 getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public Mesg1 getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private Mesg1(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+ anotherMap_ = new java.util.ArrayList<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry>();
+ mutable_bitField0_ |= 0x00000001;
+ }
+ anotherMap_.add(input.readMessage(org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.PARSER, extensionRegistry));
+ break;
+ }
+ case 18: {
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder subBuilder = null;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ subBuilder = noMap_.toBuilder();
+ }
+ noMap_ = input.readMessage(org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.PARSER, extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(noMap_);
+ noMap_ = subBuilder.buildPartial();
+ }
+ bitField0_ |= 0x00000001;
+ break;
+ }
+ case 24: {
+ if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
+ intList_ = new java.util.ArrayList<java.lang.Integer>();
+ mutable_bitField0_ |= 0x00000004;
+ }
+ intList_.add(input.readInt32());
+ break;
+ }
+ case 26: {
+ int length = input.readRawVarint32();
+ int limit = input.pushLimit(length);
+ if (!((mutable_bitField0_ & 0x00000004) == 0x00000004) && input.getBytesUntilLimit() > 0) {
+ intList_ = new java.util.ArrayList<java.lang.Integer>();
+ mutable_bitField0_ |= 0x00000004;
+ }
+ while (input.getBytesUntilLimit() > 0) {
+ intList_.add(input.readInt32());
+ }
+ input.popLimit(limit);
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+ anotherMap_ = java.util.Collections.unmodifiableList(anotherMap_);
+ }
+ if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
+ intList_ = java.util.Collections.unmodifiableList(intList_);
+ }
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_Mesg1_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_Mesg1_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.class, org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<Mesg1> PARSER =
+ new com.google.protobuf.AbstractParser<Mesg1>() {
+ public Mesg1 parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new Mesg1(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<Mesg1> getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // repeated .MapFieldEntry anotherMap = 1;
+ public static final int ANOTHERMAP_FIELD_NUMBER = 1;
+ private java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry> anotherMap_;
+ /**
+ * <code>repeated .MapFieldEntry anotherMap = 1;</code>
+ */
+ public java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry> getAnotherMapList() {
+ return anotherMap_;
+ }
+ /**
+ * <code>repeated .MapFieldEntry anotherMap = 1;</code>
+ */
+ public java.util.List<? extends org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder>
+ getAnotherMapOrBuilderList() {
+ return anotherMap_;
+ }
+ /**
+ * <code>repeated .MapFieldEntry anotherMap = 1;</code>
+ */
+ public int getAnotherMapCount() {
+ return anotherMap_.size();
+ }
+ /**
+ * <code>repeated .MapFieldEntry anotherMap = 1;</code>
+ */
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry getAnotherMap(int index) {
+ return anotherMap_.get(index);
+ }
+ /**
+ * <code>repeated .MapFieldEntry anotherMap = 1;</code>
+ */
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder getAnotherMapOrBuilder(
+ int index) {
+ return anotherMap_.get(index);
+ }
+
+ // optional .MapFieldEntry noMap = 2;
+ public static final int NOMAP_FIELD_NUMBER = 2;
+ private org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry noMap_;
+ /**
+ * <code>optional .MapFieldEntry noMap = 2;</code>
+ */
+ public boolean hasNoMap() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>optional .MapFieldEntry noMap = 2;</code>
+ */
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry getNoMap() {
+ return noMap_;
+ }
+ /**
+ * <code>optional .MapFieldEntry noMap = 2;</code>
+ */
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder getNoMapOrBuilder() {
+ return noMap_;
+ }
+
+ // repeated int32 intList = 3;
+ public static final int INTLIST_FIELD_NUMBER = 3;
+ private java.util.List<java.lang.Integer> intList_;
+ /**
+ * <code>repeated int32 intList = 3;</code>
+ */
+ public java.util.List<java.lang.Integer>
+ getIntListList() {
+ return intList_;
+ }
+ /**
+ * <code>repeated int32 intList = 3;</code>
+ */
+ public int getIntListCount() {
+ return intList_.size();
+ }
+ /**
+ * <code>repeated int32 intList = 3;</code>
+ */
+ public int getIntList(int index) {
+ return intList_.get(index);
+ }
+
+ private void initFields() {
+ anotherMap_ = java.util.Collections.emptyList();
+ noMap_ = org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.getDefaultInstance();
+ intList_ = java.util.Collections.emptyList();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ for (int i = 0; i < anotherMap_.size(); i++) {
+ output.writeMessage(1, anotherMap_.get(i));
+ }
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeMessage(2, noMap_);
+ }
+ for (int i = 0; i < intList_.size(); i++) {
+ output.writeInt32(3, intList_.get(i));
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ for (int i = 0; i < anotherMap_.size(); i++) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, anotherMap_.get(i));
+ }
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(2, noMap_);
+ }
+ {
+ int dataSize = 0;
+ for (int i = 0; i < intList_.size(); i++) {
+ dataSize += com.google.protobuf.CodedOutputStream
+ .computeInt32SizeNoTag(intList_.get(i));
+ }
+ size += dataSize;
+ size += 1 * getIntListList().size();
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code Mesg1}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_Mesg1_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_Mesg1_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.class, org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getAnotherMapFieldBuilder();
+ getNoMapFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ if (anotherMapBuilder_ == null) {
+ anotherMap_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ } else {
+ anotherMapBuilder_.clear();
+ }
+ if (noMapBuilder_ == null) {
+ noMap_ = org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.getDefaultInstance();
+ } else {
+ noMapBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000002);
+ intList_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000004);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_Mesg1_descriptor;
+ }
+
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 getDefaultInstanceForType() {
+ return org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 build() {
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 buildPartial() {
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 result = new org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (anotherMapBuilder_ == null) {
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ anotherMap_ = java.util.Collections.unmodifiableList(anotherMap_);
+ bitField0_ = (bitField0_ & ~0x00000001);
+ }
+ result.anotherMap_ = anotherMap_;
+ } else {
+ result.anotherMap_ = anotherMapBuilder_.build();
+ }
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ if (noMapBuilder_ == null) {
+ result.noMap_ = noMap_;
+ } else {
+ result.noMap_ = noMapBuilder_.build();
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ intList_ = java.util.Collections.unmodifiableList(intList_);
+ bitField0_ = (bitField0_ & ~0x00000004);
+ }
+ result.intList_ = intList_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1) {
+ return mergeFrom((org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 other) {
+ if (other == org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.getDefaultInstance()) return this;
+ if (anotherMapBuilder_ == null) {
+ if (!other.anotherMap_.isEmpty()) {
+ if (anotherMap_.isEmpty()) {
+ anotherMap_ = other.anotherMap_;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ } else {
+ ensureAnotherMapIsMutable();
+ anotherMap_.addAll(other.anotherMap_);
+ }
+ onChanged();
+ }
+ } else {
+ if (!other.anotherMap_.isEmpty()) {
+ if (anotherMapBuilder_.isEmpty()) {
+ anotherMapBuilder_.dispose();
+ anotherMapBuilder_ = null;
+ anotherMap_ = other.anotherMap_;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ anotherMapBuilder_ =
+ com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
+ getAnotherMapFieldBuilder() : null;
+ } else {
+ anotherMapBuilder_.addAllMessages(other.anotherMap_);
+ }
+ }
+ }
+ if (other.hasNoMap()) {
+ mergeNoMap(other.getNoMap());
+ }
+ if (!other.intList_.isEmpty()) {
+ if (intList_.isEmpty()) {
+ intList_ = other.intList_;
+ bitField0_ = (bitField0_ & ~0x00000004);
+ } else {
+ ensureIntListIsMutable();
+ intList_.addAll(other.intList_);
+ }
+ onChanged();
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // repeated .MapFieldEntry anotherMap = 1;
+ private java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry> anotherMap_ =
+ java.util.Collections.emptyList();
+ private void ensureAnotherMapIsMutable() {
+ if (!((bitField0_ & 0x00000001) == 0x00000001)) {
+ anotherMap_ = new java.util.ArrayList<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry>(anotherMap_);
+ bitField0_ |= 0x00000001;
+ }
+ }
+
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder> anotherMapBuilder_;
+
+ /**
+ * <code>repeated .MapFieldEntry anotherMap = 1;</code>
+ */
+ public java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry> getAnotherMapList() {
+ if (anotherMapBuilder_ == null) {
+ return java.util.Collections.unmodifiableList(anotherMap_);
+ } else {
+ return anotherMapBuilder_.getMessageList();
+ }
+ }
+ /**
+ * <code>repeated .MapFieldEntry anotherMap = 1;</code>
+ */
+ public int getAnotherMapCount() {
+ if (anotherMapBuilder_ == null) {
+ return anotherMap_.size();
+ } else {
+ return anotherMapBuilder_.getCount();
+ }
+ }
+ /**
+ * <code>repeated .MapFieldEntry anotherMap = 1;</code>
+ */
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry getAnotherMap(int index) {
+ if (anotherMapBuilder_ == null) {
+ return anotherMap_.get(index);
+ } else {
+ return anotherMapBuilder_.getMessage(index);
+ }
+ }
+ /**
+ * <code>repeated .MapFieldEntry anotherMap = 1;</code>
+ */
+ public Builder setAnotherMap(
+ int index, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry value) {
+ if (anotherMapBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureAnotherMapIsMutable();
+ anotherMap_.set(index, value);
+ onChanged();
+ } else {
+ anotherMapBuilder_.setMessage(index, value);
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .MapFieldEntry anotherMap = 1;</code>
+ */
+ public Builder setAnotherMap(
+ int index, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder builderForValue) {
+ if (anotherMapBuilder_ == null) {
+ ensureAnotherMapIsMutable();
+ anotherMap_.set(index, builderForValue.build());
+ onChanged();
+ } else {
+ anotherMapBuilder_.setMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .MapFieldEntry anotherMap = 1;</code>
+ */
+ public Builder addAnotherMap(org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry value) {
+ if (anotherMapBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureAnotherMapIsMutable();
+ anotherMap_.add(value);
+ onChanged();
+ } else {
+ anotherMapBuilder_.addMessage(value);
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .MapFieldEntry anotherMap = 1;</code>
+ */
+ public Builder addAnotherMap(
+ int index, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry value) {
+ if (anotherMapBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureAnotherMapIsMutable();
+ anotherMap_.add(index, value);
+ onChanged();
+ } else {
+ anotherMapBuilder_.addMessage(index, value);
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .MapFieldEntry anotherMap = 1;</code>
+ */
+ public Builder addAnotherMap(
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder builderForValue) {
+ if (anotherMapBuilder_ == null) {
+ ensureAnotherMapIsMutable();
+ anotherMap_.add(builderForValue.build());
+ onChanged();
+ } else {
+ anotherMapBuilder_.addMessage(builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .MapFieldEntry anotherMap = 1;</code>
+ */
+ public Builder addAnotherMap(
+ int index, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder builderForValue) {
+ if (anotherMapBuilder_ == null) {
+ ensureAnotherMapIsMutable();
+ anotherMap_.add(index, builderForValue.build());
+ onChanged();
+ } else {
+ anotherMapBuilder_.addMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .MapFieldEntry anotherMap = 1;</code>
+ */
+ public Builder addAllAnotherMap(
+ java.lang.Iterable<? extends org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry> values) {
+ if (anotherMapBuilder_ == null) {
+ ensureAnotherMapIsMutable();
+ super.addAll(values, anotherMap_);
+ onChanged();
+ } else {
+ anotherMapBuilder_.addAllMessages(values);
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .MapFieldEntry anotherMap = 1;</code>
+ */
+ public Builder clearAnotherMap() {
+ if (anotherMapBuilder_ == null) {
+ anotherMap_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ onChanged();
+ } else {
+ anotherMapBuilder_.clear();
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .MapFieldEntry anotherMap = 1;</code>
+ */
+ public Builder removeAnotherMap(int index) {
+ if (anotherMapBuilder_ == null) {
+ ensureAnotherMapIsMutable();
+ anotherMap_.remove(index);
+ onChanged();
+ } else {
+ anotherMapBuilder_.remove(index);
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .MapFieldEntry anotherMap = 1;</code>
+ */
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder getAnotherMapBuilder(
+ int index) {
+ return getAnotherMapFieldBuilder().getBuilder(index);
+ }
+ /**
+ * <code>repeated .MapFieldEntry anotherMap = 1;</code>
+ */
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder getAnotherMapOrBuilder(
+ int index) {
+ if (anotherMapBuilder_ == null) {
+ return anotherMap_.get(index); } else {
+ return anotherMapBuilder_.getMessageOrBuilder(index);
+ }
+ }
+ /**
+ * <code>repeated .MapFieldEntry anotherMap = 1;</code>
+ */
+ public java.util.List<? extends org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder>
+ getAnotherMapOrBuilderList() {
+ if (anotherMapBuilder_ != null) {
+ return anotherMapBuilder_.getMessageOrBuilderList();
+ } else {
+ return java.util.Collections.unmodifiableList(anotherMap_);
+ }
+ }
+ /**
+ * <code>repeated .MapFieldEntry anotherMap = 1;</code>
+ */
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder addAnotherMapBuilder() {
+ return getAnotherMapFieldBuilder().addBuilder(
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.getDefaultInstance());
+ }
+ /**
+ * <code>repeated .MapFieldEntry anotherMap = 1;</code>
+ */
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder addAnotherMapBuilder(
+ int index) {
+ return getAnotherMapFieldBuilder().addBuilder(
+ index, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.getDefaultInstance());
+ }
+ /**
+ * <code>repeated .MapFieldEntry anotherMap = 1;</code>
+ */
+ public java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder>
+ getAnotherMapBuilderList() {
+ return getAnotherMapFieldBuilder().getBuilderList();
+ }
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder>
+ getAnotherMapFieldBuilder() {
+ if (anotherMapBuilder_ == null) {
+ anotherMapBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder>(
+ anotherMap_,
+ ((bitField0_ & 0x00000001) == 0x00000001),
+ getParentForChildren(),
+ isClean());
+ anotherMap_ = null;
+ }
+ return anotherMapBuilder_;
+ }
+
+ // optional .MapFieldEntry noMap = 2;
+ private org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry noMap_ = org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder> noMapBuilder_;
+ /**
+ * <code>optional .MapFieldEntry noMap = 2;</code>
+ */
+ public boolean hasNoMap() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * <code>optional .MapFieldEntry noMap = 2;</code>
+ */
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry getNoMap() {
+ if (noMapBuilder_ == null) {
+ return noMap_;
+ } else {
+ return noMapBuilder_.getMessage();
+ }
+ }
+ /**
+ * <code>optional .MapFieldEntry noMap = 2;</code>
+ */
+ public Builder setNoMap(org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry value) {
+ if (noMapBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ noMap_ = value;
+ onChanged();
+ } else {
+ noMapBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000002;
+ return this;
+ }
+ /**
+ * <code>optional .MapFieldEntry noMap = 2;</code>
+ */
+ public Builder setNoMap(
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder builderForValue) {
+ if (noMapBuilder_ == null) {
+ noMap_ = builderForValue.build();
+ onChanged();
+ } else {
+ noMapBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000002;
+ return this;
+ }
+ /**
+ * <code>optional .MapFieldEntry noMap = 2;</code>
+ */
+ public Builder mergeNoMap(org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry value) {
+ if (noMapBuilder_ == null) {
+ if (((bitField0_ & 0x00000002) == 0x00000002) &&
+ noMap_ != org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.getDefaultInstance()) {
+ noMap_ =
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.newBuilder(noMap_).mergeFrom(value).buildPartial();
+ } else {
+ noMap_ = value;
+ }
+ onChanged();
+ } else {
+ noMapBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000002;
+ return this;
+ }
+ /**
+ * <code>optional .MapFieldEntry noMap = 2;</code>
+ */
+ public Builder clearNoMap() {
+ if (noMapBuilder_ == null) {
+ noMap_ = org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.getDefaultInstance();
+ onChanged();
+ } else {
+ noMapBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000002);
+ return this;
+ }
+ /**
+ * <code>optional .MapFieldEntry noMap = 2;</code>
+ */
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder getNoMapBuilder() {
+ bitField0_ |= 0x00000002;
+ onChanged();
+ return getNoMapFieldBuilder().getBuilder();
+ }
+ /**
+ * <code>optional .MapFieldEntry noMap = 2;</code>
+ */
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder getNoMapOrBuilder() {
+ if (noMapBuilder_ != null) {
+ return noMapBuilder_.getMessageOrBuilder();
+ } else {
+ return noMap_;
+ }
+ }
+ /**
+ * <code>optional .MapFieldEntry noMap = 2;</code>
+ */
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder>
+ getNoMapFieldBuilder() {
+ if (noMapBuilder_ == null) {
+ noMapBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder>(
+ noMap_,
+ getParentForChildren(),
+ isClean());
+ noMap_ = null;
+ }
+ return noMapBuilder_;
+ }
+
+ // repeated int32 intList = 3;
+ private java.util.List<java.lang.Integer> intList_ = java.util.Collections.emptyList();
+ private void ensureIntListIsMutable() {
+ if (!((bitField0_ & 0x00000004) == 0x00000004)) {
+ intList_ = new java.util.ArrayList<java.lang.Integer>(intList_);
+ bitField0_ |= 0x00000004;
+ }
+ }
+ /**
+ * <code>repeated int32 intList = 3;</code>
+ */
+ public java.util.List<java.lang.Integer>
+ getIntListList() {
+ return java.util.Collections.unmodifiableList(intList_);
+ }
+ /**
+ * <code>repeated int32 intList = 3;</code>
+ */
+ public int getIntListCount() {
+ return intList_.size();
+ }
+ /**
+ * <code>repeated int32 intList = 3;</code>
+ */
+ public int getIntList(int index) {
+ return intList_.get(index);
+ }
+ /**
+ * <code>repeated int32 intList = 3;</code>
+ */
+ public Builder setIntList(
+ int index, int value) {
+ ensureIntListIsMutable();
+ intList_.set(index, value);
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>repeated int32 intList = 3;</code>
+ */
+ public Builder addIntList(int value) {
+ ensureIntListIsMutable();
+ intList_.add(value);
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>repeated int32 intList = 3;</code>
+ */
+ public Builder addAllIntList(
+ java.lang.Iterable<? extends java.lang.Integer> values) {
+ ensureIntListIsMutable();
+ super.addAll(values, intList_);
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>repeated int32 intList = 3;</code>
+ */
+ public Builder clearIntList() {
+ intList_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000004);
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:Mesg1)
+ }
+
+ static {
+ defaultInstance = new Mesg1(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:Mesg1)
+ }
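Similarly, a hedged sketch (again not part of the commit) using the Mesg1 builder methods defined above for the repeated anotherMap, optional noMap and repeated intList fields:

import org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry;
import org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1;

public class Mesg1Example {
  public static void main(String[] args) throws Exception {
    Mesg1 msg = Mesg1.newBuilder()
        // repeated .MapFieldEntry anotherMap = 1
        .addAnotherMap(MapFieldEntry.newBuilder().setKey("k1").setValue("v1"))
        // optional .MapFieldEntry noMap = 2
        .setNoMap(MapFieldEntry.newBuilder().setKey("k2").setValue("v2").build())
        // repeated int32 intList = 3
        .addIntList(1)
        .addIntList(2)
        .build();
    Mesg1 parsed = Mesg1.parseFrom(msg.toByteArray());
    System.out.println(parsed.getAnotherMapCount());   // 1
    System.out.println(parsed.getNoMap().getValue());  // v2
    System.out.println(parsed.getIntListList());       // [1, 2]
  }
}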
+
+ public interface AllTypesOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // optional double doubleType = 1;
+ /**
+ * <code>optional double doubleType = 1;</code>
+ */
+ boolean hasDoubleType();
+ /**
+ * <code>optional double doubleType = 1;</code>
+ */
+ double getDoubleType();
+
+ // optional float floatType = 2;
+ /**
+ * <code>optional float floatType = 2;</code>
+ */
+ boolean hasFloatType();
+ /**
+ * <code>optional float floatType = 2;</code>
+ */
+ float getFloatType();
+
+ // optional int32 int32Type = 3;
+ /**
+ * <code>optional int32 int32Type = 3;</code>
+ */
+ boolean hasInt32Type();
+ /**
+ * <code>optional int32 int32Type = 3;</code>
+ */
+ int getInt32Type();
+
+ // optional int64 int64Type = 4;
+ /**
+ * <code>optional int64 int64Type = 4;</code>
+ */
+ boolean hasInt64Type();
+ /**
+ * <code>optional int64 int64Type = 4;</code>
+ */
+ long getInt64Type();
+
+ // optional uint32 uint32Type = 5;
+ /**
+ * <code>optional uint32 uint32Type = 5;</code>
+ */
+ boolean hasUint32Type();
+ /**
+ * <code>optional uint32 uint32Type = 5;</code>
+ */
+ int getUint32Type();
+
+ // optional uint64 uint64Type = 6;
+ /**
+ * <code>optional uint64 uint64Type = 6;</code>
+ */
+ boolean hasUint64Type();
+ /**
+ * <code>optional uint64 uint64Type = 6;</code>
+ */
+ long getUint64Type();
+
+ // optional sint32 sint32Type = 7;
+ /**
+ * <code>optional sint32 sint32Type = 7;</code>
+ */
+ boolean hasSint32Type();
+ /**
+ * <code>optional sint32 sint32Type = 7;</code>
+ */
+ int getSint32Type();
+
+ // optional sint64 sint64Type = 8;
+ /**
+ * <code>optional sint64 sint64Type = 8;</code>
+ */
+ boolean hasSint64Type();
+ /**
+ * <code>optional sint64 sint64Type = 8;</code>
+ */
+ long getSint64Type();
+
+ // optional fixed32 fixed32Type = 9;
+ /**
+ * <code>optional fixed32 fixed32Type = 9;</code>
+ */
+ boolean hasFixed32Type();
+ /**
+ * <code>optional fixed32 fixed32Type = 9;</code>
+ */
+ int getFixed32Type();
+
+ // optional fixed64 fixed64Type = 10;
+ /**
+ * <code>optional fixed64 fixed64Type = 10;</code>
+ */
+ boolean hasFixed64Type();
+ /**
+ * <code>optional fixed64 fixed64Type = 10;</code>
+ */
+ long getFixed64Type();
+
+ // optional sfixed32 sfixed32Type = 11;
+ /**
+ * <code>optional sfixed32 sfixed32Type = 11;</code>
+ */
+ boolean hasSfixed32Type();
+ /**
+ * <code>optional sfixed32 sfixed32Type = 11;</code>
+ */
+ int getSfixed32Type();
+
+ // optional sfixed64 sfixed64Type = 12;
+ /**
+ * <code>optional sfixed64 sfixed64Type = 12;</code>
+ */
+ boolean hasSfixed64Type();
+ /**
+ * <code>optional sfixed64 sfixed64Type = 12;</code>
+ */
+ long getSfixed64Type();
+
+ // optional bool boolType = 13;
+ /**
+ * <code>optional bool boolType = 13;</code>
+ */
+ boolean hasBoolType();
+ /**
+ * <code>optional bool boolType = 13;</code>
+ */
+ boolean getBoolType();
+
+ // optional string stringType = 14;
+ /**
+ * <code>optional string stringType = 14;</code>
+ */
+ boolean hasStringType();
+ /**
+ * <code>optional string stringType = 14;</code>
+ */
+ java.lang.String getStringType();
+ /**
+ * <code>optional string stringType = 14;</code>
+ */
+ com.google.protobuf.ByteString
+ getStringTypeBytes();
+
+ // optional bytes bytesType = 15;
+ /**
+ * <code>optional bytes bytesType = 15;</code>
+ */
+ boolean hasBytesType();
+ /**
+ * <code>optional bytes bytesType = 15;</code>
+ */
+ com.google.protobuf.ByteString getBytesType();
+
+ // repeated .MapFieldEntry mapType = 16;
+ /**
+ * <code>repeated .MapFieldEntry mapType = 16;</code>
+ */
+ java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry>
+ getMapTypeList();
+ /**
+ * <code>repeated .MapFieldEntry mapType = 16;</code>
+ */
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry getMapType(int index);
+ /**
+ * <code>repeated .MapFieldEntry mapType = 16;</code>
+ */
+ int getMapTypeCount();
+ /**
+ * <code>repeated .MapFieldEntry mapType = 16;</code>
+ */
+ java.util.List<? extends org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder>
+ getMapTypeOrBuilderList();
+ /**
+ * <code>repeated .MapFieldEntry mapType = 16;</code>
+ */
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder getMapTypeOrBuilder(
+ int index);
+
+ // repeated string stringListType = 17;
+ /**
+ * <code>repeated string stringListType = 17;</code>
+ */
+ java.util.List<java.lang.String>
+ getStringListTypeList();
+ /**
+ * <code>repeated string stringListType = 17;</code>
+ */
+ int getStringListTypeCount();
+ /**
+ * <code>repeated string stringListType = 17;</code>
+ */
+ java.lang.String getStringListType(int index);
+ /**
+ * <code>repeated string stringListType = 17;</code>
+ */
+ com.google.protobuf.ByteString
+ getStringListTypeBytes(int index);
+
+ // optional .Mesg1 messageType = 18;
+ /**
+ * <code>optional .Mesg1 messageType = 18;</code>
+ */
+ boolean hasMessageType();
+ /**
+ * <code>optional .Mesg1 messageType = 18;</code>
+ */
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 getMessageType();
+ /**
+ * <code>optional .Mesg1 messageType = 18;</code>
+ */
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder getMessageTypeOrBuilder();
+
+ // repeated .Mesg1 messageListType = 19;
+ /**
+ * <code>repeated .Mesg1 messageListType = 19;</code>
+ */
+ java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1>
+ getMessageListTypeList();
+ /**
+ * <code>repeated .Mesg1 messageListType = 19;</code>
+ */
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 getMessageListType(int index);
+ /**
+ * <code>repeated .Mesg1 messageListType = 19;</code>
+ */
+ int getMessageListTypeCount();
+ /**
+ * <code>repeated .Mesg1 messageListType = 19;</code>
+ */
+ java.util.List<? extends org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder>
+ getMessageListTypeOrBuilderList();
+ /**
+ * <code>repeated .Mesg1 messageListType = 19;</code>
+ */
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder getMessageListTypeOrBuilder(
+ int index);
+
+ // optional .AllTypes.Enum1 enumType = 20;
+ /**
+ * <code>optional .AllTypes.Enum1 enumType = 20;</code>
+ */
+ boolean hasEnumType();
+ /**
+ * <code>optional .AllTypes.Enum1 enumType = 20;</code>
+ */
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Enum1 getEnumType();
+ }
+ /**
+ * Protobuf type {@code AllTypes}
+ */
+ public static final class AllTypes extends
+ com.google.protobuf.GeneratedMessage
+ implements AllTypesOrBuilder {
+ // Use AllTypes.newBuilder() to construct.
+ private AllTypes(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private AllTypes(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final AllTypes defaultInstance;
+ public static AllTypes getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public AllTypes getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private AllTypes(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 9: {
+ bitField0_ |= 0x00000001;
+ doubleType_ = input.readDouble();
+ break;
+ }
+ case 21: {
+ bitField0_ |= 0x00000002;
+ floatType_ = input.readFloat();
+ break;
+ }
+ case 24: {
+ bitField0_ |= 0x00000004;
+ int32Type_ = input.readInt32();
+ break;
+ }
+ case 32: {
+ bitField0_ |= 0x00000008;
+ int64Type_ = input.readInt64();
+ break;
+ }
+ case 40: {
+ bitField0_ |= 0x00000010;
+ uint32Type_ = input.readUInt32();
+ break;
+ }
+ case 48: {
+ bitField0_ |= 0x00000020;
+ uint64Type_ = input.readUInt64();
+ break;
+ }
+ case 56: {
+ bitField0_ |= 0x00000040;
+ sint32Type_ = input.readSInt32();
+ break;
+ }
+ case 64: {
+ bitField0_ |= 0x00000080;
+ sint64Type_ = input.readSInt64();
+ break;
+ }
+ case 77: {
+ bitField0_ |= 0x00000100;
+ fixed32Type_ = input.readFixed32();
+ break;
+ }
+ case 81: {
+ bitField0_ |= 0x00000200;
+ fixed64Type_ = input.readFixed64();
+ break;
+ }
+ case 93: {
+ bitField0_ |= 0x00000400;
+ sfixed32Type_ = input.readSFixed32();
+ break;
+ }
+ case 97: {
+ bitField0_ |= 0x00000800;
+ sfixed64Type_ = input.readSFixed64();
+ break;
+ }
+ case 104: {
+ bitField0_ |= 0x00001000;
+ boolType_ = input.readBool();
+ break;
+ }
+ case 114: {
+ bitField0_ |= 0x00002000;
+ stringType_ = input.readBytes();
+ break;
+ }
+ case 122: {
+ bitField0_ |= 0x00004000;
+ bytesType_ = input.readBytes();
+ break;
+ }
+ case 130: {
+ if (!((mutable_bitField0_ & 0x00008000) == 0x00008000)) {
+ mapType_ = new java.util.ArrayList<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry>();
+ mutable_bitField0_ |= 0x00008000;
+ }
+ mapType_.add(input.readMessage(org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.PARSER, extensionRegistry));
+ break;
+ }
+ case 138: {
+ if (!((mutable_bitField0_ & 0x00010000) == 0x00010000)) {
+ stringListType_ = new com.google.protobuf.LazyStringArrayList();
+ mutable_bitField0_ |= 0x00010000;
+ }
+ stringListType_.add(input.readBytes());
+ break;
+ }
+ case 146: {
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder subBuilder = null;
+ if (((bitField0_ & 0x00008000) == 0x00008000)) {
+ subBuilder = messageType_.toBuilder();
+ }
+ messageType_ = input.readMessage(org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.PARSER, extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(messageType_);
+ messageType_ = subBuilder.buildPartial();
+ }
+ bitField0_ |= 0x00008000;
+ break;
+ }
+ case 154: {
+ if (!((mutable_bitField0_ & 0x00040000) == 0x00040000)) {
+ messageListType_ = new java.util.ArrayList<org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1>();
+ mutable_bitField0_ |= 0x00040000;
+ }
+ messageListType_.add(input.readMessage(org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.PARSER, extensionRegistry));
+ break;
+ }
+ case 160: {
+ int rawValue = input.readEnum();
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Enum1 value = org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Enum1.valueOf(rawValue);
+ if (value == null) {
+ unknownFields.mergeVarintField(20, rawValue);
+ } else {
+ bitField0_ |= 0x00010000;
+ enumType_ = value;
+ }
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ if (((mutable_bitField0_ & 0x00008000) == 0x00008000)) {
+ mapType_ = java.util.Collections.unmodifiableList(mapType_);
+ }
+ if (((mutable_bitField0_ & 0x00010000) == 0x00010000)) {
+ stringListType_ = new com.google.protobuf.UnmodifiableLazyStringList(stringListType_);
+ }
+ if (((mutable_bitField0_ & 0x00040000) == 0x00040000)) {
+ messageListType_ = java.util.Collections.unmodifiableList(messageListType_);
+ }
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
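
For readers tracing the switch above: the case labels are protobuf wire tags, computed as (fieldNumber << 3) | wireType. A minimal sketch of that arithmetic for the first three fields (this is the protobuf encoding rule, not code from this patch):

    int doubleTag = (1 << 3) | 1; // 9  - doubleType, 64-bit wire type
    int floatTag  = (2 << 3) | 5; // 21 - floatType, 32-bit wire type
    int int32Tag  = (3 << 3) | 0; // 24 - int32Type, varint wire type
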
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_AllTypes_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_AllTypes_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.class, org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<AllTypes> PARSER =
+ new com.google.protobuf.AbstractParser<AllTypes>() {
+ public AllTypes parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new AllTypes(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<AllTypes> getParserForType() {
+ return PARSER;
+ }
+
+ /**
+ * Protobuf enum {@code AllTypes.Enum1}
+ */
+ public enum Enum1
+ implements com.google.protobuf.ProtocolMessageEnum {
+ /**
+ * <code>VAL1 = 1;</code>
+ */
+ VAL1(0, 1),
+ /**
+ * <code>VAL2 = 2;</code>
+ */
+ VAL2(1, 2),
+ ;
+
+ /**
+ * <code>VAL1 = 1;</code>
+ */
+ public static final int VAL1_VALUE = 1;
+ /**
+ * <code>VAL2 = 2;</code>
+ */
+ public static final int VAL2_VALUE = 2;
+
+
+ public final int getNumber() { return value; }
+
+ public static Enum1 valueOf(int value) {
+ switch (value) {
+ case 1: return VAL1;
+ case 2: return VAL2;
+ default: return null;
+ }
+ }
+
+ public static com.google.protobuf.Internal.EnumLiteMap<Enum1>
+ internalGetValueMap() {
+ return internalValueMap;
+ }
+ private static com.google.protobuf.Internal.EnumLiteMap<Enum1>
+ internalValueMap =
+ new com.google.protobuf.Internal.EnumLiteMap<Enum1>() {
+ public Enum1 findValueByNumber(int number) {
+ return Enum1.valueOf(number);
+ }
+ };
+
+ public final com.google.protobuf.Descriptors.EnumValueDescriptor
+ getValueDescriptor() {
+ return getDescriptor().getValues().get(index);
+ }
+ public final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+ public static final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptor() {
+ return org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.getDescriptor().getEnumTypes().get(0);
+ }
+
+ private static final Enum1[] VALUES = values();
+
+ public static Enum1 valueOf(
+ com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
+ if (desc.getType() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "EnumValueDescriptor is not for this type.");
+ }
+ return VALUES[desc.getIndex()];
+ }
+
+ private final int index;
+ private final int value;
+
+ private Enum1(int index, int value) {
+ this.index = index;
+ this.value = value;
+ }
+
+ // @@protoc_insertion_point(enum_scope:AllTypes.Enum1)
+ }
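
A brief sketch of how the generated Enum1 mapping behaves, assuming the SampleProtos classes from this patch are on the classpath (values follow the constants above; numbers with no matching constant return null, which the parsing constructor routes into unknown fields):

    AllTypes.Enum1 v1 = AllTypes.Enum1.valueOf(1);       // VAL1
    int n = AllTypes.Enum1.VAL2.getNumber();              // 2
    AllTypes.Enum1 missing = AllTypes.Enum1.valueOf(3);   // null - no constant for 3
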
+
+ private int bitField0_;
+ // optional double doubleType = 1;
+ public static final int DOUBLETYPE_FIELD_NUMBER = 1;
+ private double doubleType_;
+ /**
+ * <code>optional double doubleType = 1;</code>
+ */
+ public boolean hasDoubleType() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>optional double doubleType = 1;</code>
+ */
+ public double getDoubleType() {
+ return doubleType_;
+ }
+
+ // optional float floatType = 2;
+ public static final int FLOATTYPE_FIELD_NUMBER = 2;
+ private float floatType_;
+ /**
+ * <code>optional float floatType = 2;</code>
+ */
+ public boolean hasFloatType() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * <code>optional float floatType = 2;</code>
+ */
+ public float getFloatType() {
+ return floatType_;
+ }
+
+ // optional int32 int32Type = 3;
+ public static final int INT32TYPE_FIELD_NUMBER = 3;
+ private int int32Type_;
+ /**
+ * <code>optional int32 int32Type = 3;</code>
+ */
+ public boolean hasInt32Type() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * <code>optional int32 int32Type = 3;</code>
+ */
+ public int getInt32Type() {
+ return int32Type_;
+ }
+
+ // optional int64 int64Type = 4;
+ public static final int INT64TYPE_FIELD_NUMBER = 4;
+ private long int64Type_;
+ /**
+ * <code>optional int64 int64Type = 4;</code>
+ */
+ public boolean hasInt64Type() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ /**
+ * <code>optional int64 int64Type = 4;</code>
+ */
+ public long getInt64Type() {
+ return int64Type_;
+ }
+
+ // optional uint32 uint32Type = 5;
+ public static final int UINT32TYPE_FIELD_NUMBER = 5;
+ private int uint32Type_;
+ /**
+ * <code>optional uint32 uint32Type = 5;</code>
+ */
+ public boolean hasUint32Type() {
+ return ((bitField0_ & 0x00000010) == 0x00000010);
+ }
+ /**
+ * <code>optional uint32 uint32Type = 5;</code>
+ */
+ public int getUint32Type() {
+ return uint32Type_;
+ }
+
+ // optional uint64 uint64Type = 6;
+ public static final int UINT64TYPE_FIELD_NUMBER = 6;
+ private long uint64Type_;
+ /**
+ * <code>optional uint64 uint64Type = 6;</code>
+ */
+ public boolean hasUint64Type() {
+ return ((bitField0_ & 0x00000020) == 0x00000020);
+ }
+ /**
+ * <code>optional uint64 uint64Type = 6;</code>
+ */
+ public long getUint64Type() {
+ return uint64Type_;
+ }
+
+ // optional sint32 sint32Type = 7;
+ public static final int SINT32TYPE_FIELD_NUMBER = 7;
+ private int sint32Type_;
+ /**
+ * <code>optional sint32 sint32Type = 7;</code>
+ */
+ public boolean hasSint32Type() {
+ return ((bitField0_ & 0x00000040) == 0x00000040);
+ }
+ /**
+ * <code>optional sint32 sint32Type = 7;</code>
+ */
+ public int getSint32Type() {
+ return sint32Type_;
+ }
+
+ // optional sint64 sint64Type = 8;
+ public static final int SINT64TYPE_FIELD_NUMBER = 8;
+ private long sint64Type_;
+ /**
+ * <code>optional sint64 sint64Type = 8;</code>
+ */
+ public boolean hasSint64Type() {
+ return ((bitField0_ & 0x00000080) == 0x00000080);
+ }
+ /**
+ * <code>optional sint64 sint64Type = 8;</code>
+ */
+ public long getSint64Type() {
+ return sint64Type_;
+ }
+
+ // optional fixed32 fixed32Type = 9;
+ public static final int FIXED32TYPE_FIELD_NUMBER = 9;
+ private int fixed32Type_;
+ /**
+ * <code>optional fixed32 fixed32Type = 9;</code>
+ */
+ public boolean hasFixed32Type() {
+ return ((bitField0_ & 0x00000100) == 0x00000100);
+ }
+ /**
+ * <code>optional fixed32 fixed32Type = 9;</code>
+ */
+ public int getFixed32Type() {
+ return fixed32Type_;
+ }
+
+ // optional fixed64 fixed64Type = 10;
+ public static final int FIXED64TYPE_FIELD_NUMBER = 10;
+ private long fixed64Type_;
+ /**
+ * <code>optional fixed64 fixed64Type = 10;</code>
+ */
+ public boolean hasFixed64Type() {
+ return ((bitField0_ & 0x00000200) == 0x00000200);
+ }
+ /**
+ * <code>optional fixed64 fixed64Type = 10;</code>
+ */
+ public long getFixed64Type() {
+ return fixed64Type_;
+ }
+
+ // optional sfixed32 sfixed32Type = 11;
+ public static final int SFIXED32TYPE_FIELD_NUMBER = 11;
+ private int sfixed32Type_;
+ /**
+ * <code>optional sfixed32 sfixed32Type = 11;</code>
+ */
+ public boolean hasSfixed32Type() {
+ return ((bitField0_ & 0x00000400) == 0x00000400);
+ }
+ /**
+ * <code>optional sfixed32 sfixed32Type = 11;</code>
+ */
+ public int getSfixed32Type() {
+ return sfixed32Type_;
+ }
+
+ // optional sfixed64 sfixed64Type = 12;
+ public static final int SFIXED64TYPE_FIELD_NUMBER = 12;
+ private long sfixed64Type_;
+ /**
+ * <code>optional sfixed64 sfixed64Type = 12;</code>
+ */
+ public boolean hasSfixed64Type() {
+ return ((bitField0_ & 0x00000800) == 0x00000800);
+ }
+ /**
+ * <code>optional sfixed64 sfixed64Type = 12;</code>
+ */
+ public long getSfixed64Type() {
+ return sfixed64Type_;
+ }
+
+ // optional bool boolType = 13;
+ public static final int BOOLTYPE_FIELD_NUMBER = 13;
+ private boolean boolType_;
+ /**
+ * <code>optional bool boolType = 13;</code>
+ */
+ public boolean hasBoolType() {
+ return ((bitField0_ & 0x00001000) == 0x00001000);
+ }
+ /**
+ * <code>optional bool boolType = 13;</code>
+ */
+ public boolean getBoolType() {
+ return boolType_;
+ }
+
+ // optional string stringType = 14;
+ public static final int STRINGTYPE_FIELD_NUMBER = 14;
+ private java.lang.Object stringType_;
+ /**
+ * <code>optional string stringType = 14;</code>
+ */
+ public boolean hasStringType() {
+ return ((bitField0_ & 0x00002000) == 0x00002000);
+ }
+ /**
+ * <code>optional string stringType = 14;</code>
+ */
+ public java.lang.String getStringType() {
+ java.lang.Object ref = stringType_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ stringType_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ * <code>optional string stringType = 14;</code>
+ */
+ public com.google.protobuf.ByteString
+ getStringTypeBytes() {
+ java.lang.Object ref = stringType_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ stringType_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ // optional bytes bytesType = 15;
+ public static final int BYTESTYPE_FIELD_NUMBER = 15;
+ private com.google.protobuf.ByteString bytesType_;
+ /**
+ * <code>optional bytes bytesType = 15;</code>
+ */
+ public boolean hasBytesType() {
+ return ((bitField0_ & 0x00004000) == 0x00004000);
+ }
+ /**
+ * <code>optional bytes bytesType = 15;</code>
+ */
+ public com.google.protobuf.ByteString getBytesType() {
+ return bytesType_;
+ }
+
+ // repeated .MapFieldEntry mapType = 16;
+ public static final int MAPTYPE_FIELD_NUMBER = 16;
+ private java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry> mapType_;
+ /**
+ * <code>repeated .MapFieldEntry mapType = 16;</code>
+ */
+ public java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry> getMapTypeList() {
+ return mapType_;
+ }
+ /**
+ * <code>repeated .MapFieldEntry mapType = 16;</code>
+ */
+ public java.util.List<? extends org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder>
+ getMapTypeOrBuilderList() {
+ return mapType_;
+ }
+ /**
+ * <code>repeated .MapFieldEntry mapType = 16;</code>
+ */
+ public int getMapTypeCount() {
+ return mapType_.size();
+ }
+ /**
+ * <code>repeated .MapFieldEntry mapType = 16;</code>
+ */
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry getMapType(int index) {
+ return mapType_.get(index);
+ }
+ /**
+ * <code>repeated .MapFieldEntry mapType = 16;</code>
+ */
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder getMapTypeOrBuilder(
+ int index) {
+ return mapType_.get(index);
+ }
+
+ // repeated string stringListType = 17;
+ public static final int STRINGLISTTYPE_FIELD_NUMBER = 17;
+ private com.google.protobuf.LazyStringList stringListType_;
+ /**
+ * <code>repeated string stringListType = 17;</code>
+ */
+ public java.util.List<java.lang.String>
+ getStringListTypeList() {
+ return stringListType_;
+ }
+ /**
+ * <code>repeated string stringListType = 17;</code>
+ */
+ public int getStringListTypeCount() {
+ return stringListType_.size();
+ }
+ /**
+ * <code>repeated string stringListType = 17;</code>
+ */
+ public java.lang.String getStringListType(int index) {
+ return stringListType_.get(index);
+ }
+ /**
+ * <code>repeated string stringListType = 17;</code>
+ */
+ public com.google.protobuf.ByteString
+ getStringListTypeBytes(int index) {
+ return stringListType_.getByteString(index);
+ }
+
+ // optional .Mesg1 messageType = 18;
+ public static final int MESSAGETYPE_FIELD_NUMBER = 18;
+ private org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 messageType_;
+ /**
+ * <code>optional .Mesg1 messageType = 18;</code>
+ */
+ public boolean hasMessageType() {
+ return ((bitField0_ & 0x00008000) == 0x00008000);
+ }
+ /**
+ * <code>optional .Mesg1 messageType = 18;</code>
+ */
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 getMessageType() {
+ return messageType_;
+ }
+ /**
+ * <code>optional .Mesg1 messageType = 18;</code>
+ */
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder getMessageTypeOrBuilder() {
+ return messageType_;
+ }
+
+ // repeated .Mesg1 messageListType = 19;
+ public static final int MESSAGELISTTYPE_FIELD_NUMBER = 19;
+ private java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1> messageListType_;
+ /**
+ * <code>repeated .Mesg1 messageListType = 19;</code>
+ */
+ public java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1> getMessageListTypeList() {
+ return messageListType_;
+ }
+ /**
+ * <code>repeated .Mesg1 messageListType = 19;</code>
+ */
+ public java.util.List<? extends org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder>
+ getMessageListTypeOrBuilderList() {
+ return messageListType_;
+ }
+ /**
+ * <code>repeated .Mesg1 messageListType = 19;</code>
+ */
+ public int getMessageListTypeCount() {
+ return messageListType_.size();
+ }
+ /**
+ * <code>repeated .Mesg1 messageListType = 19;</code>
+ */
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 getMessageListType(int index) {
+ return messageListType_.get(index);
+ }
+ /**
+ * <code>repeated .Mesg1 messageListType = 19;</code>
+ */
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder getMessageListTypeOrBuilder(
+ int index) {
+ return messageListType_.get(index);
+ }
+
+ // optional .AllTypes.Enum1 enumType = 20;
+ public static final int ENUMTYPE_FIELD_NUMBER = 20;
+ private org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Enum1 enumType_;
+ /**
+ * <code>optional .AllTypes.Enum1 enumType = 20;</code>
+ */
+ public boolean hasEnumType() {
+ return ((bitField0_ & 0x00010000) == 0x00010000);
+ }
+ /**
+ * <code>optional .AllTypes.Enum1 enumType = 20;</code>
+ */
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Enum1 getEnumType() {
+ return enumType_;
+ }
+
+ private void initFields() {
+ doubleType_ = 0D;
+ floatType_ = 0F;
+ int32Type_ = 0;
+ int64Type_ = 0L;
+ uint32Type_ = 0;
+ uint64Type_ = 0L;
+ sint32Type_ = 0;
+ sint64Type_ = 0L;
+ fixed32Type_ = 0;
+ fixed64Type_ = 0L;
+ sfixed32Type_ = 0;
+ sfixed64Type_ = 0L;
+ boolType_ = false;
+ stringType_ = "";
+ bytesType_ = com.google.protobuf.ByteString.EMPTY;
+ mapType_ = java.util.Collections.emptyList();
+ stringListType_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+ messageType_ = org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.getDefaultInstance();
+ messageListType_ = java.util.Collections.emptyList();
+ enumType_ = org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Enum1.VAL1;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeDouble(1, doubleType_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeFloat(2, floatType_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ output.writeInt32(3, int32Type_);
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ output.writeInt64(4, int64Type_);
+ }
+ if (((bitField0_ & 0x00000010) == 0x00000010)) {
+ output.writeUInt32(5, uint32Type_);
+ }
+ if (((bitField0_ & 0x00000020) == 0x00000020)) {
+ output.writeUInt64(6, uint64Type_);
+ }
+ if (((bitField0_ & 0x00000040) == 0x00000040)) {
+ output.writeSInt32(7, sint32Type_);
+ }
+ if (((bitField0_ & 0x00000080) == 0x00000080)) {
+ output.writeSInt64(8, sint64Type_);
+ }
+ if (((bitField0_ & 0x00000100) == 0x00000100)) {
+ output.writeFixed32(9, fixed32Type_);
+ }
+ if (((bitField0_ & 0x00000200) == 0x00000200)) {
+ output.writeFixed64(10, fixed64Type_);
+ }
+ if (((bitField0_ & 0x00000400) == 0x00000400)) {
+ output.writeSFixed32(11, sfixed32Type_);
+ }
+ if (((bitField0_ & 0x00000800) == 0x00000800)) {
+ output.writeSFixed64(12, sfixed64Type_);
+ }
+ if (((bitField0_ & 0x00001000) == 0x00001000)) {
+ output.writeBool(13, boolType_);
+ }
+ if (((bitField0_ & 0x00002000) == 0x00002000)) {
+ output.writeBytes(14, getStringTypeBytes());
+ }
+ if (((bitField0_ & 0x00004000) == 0x00004000)) {
+ output.writeBytes(15, bytesType_);
+ }
+ for (int i = 0; i < mapType_.size(); i++) {
+ output.writeMessage(16, mapType_.get(i));
+ }
+ for (int i = 0; i < stringListType_.size(); i++) {
+ output.writeBytes(17, stringListType_.getByteString(i));
+ }
+ if (((bitField0_ & 0x00008000) == 0x00008000)) {
+ output.writeMessage(18, messageType_);
+ }
+ for (int i = 0; i < messageListType_.size(); i++) {
+ output.writeMessage(19, messageListType_.get(i));
+ }
+ if (((bitField0_ & 0x00010000) == 0x00010000)) {
+ output.writeEnum(20, enumType_.getNumber());
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeDoubleSize(1, doubleType_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeFloatSize(2, floatType_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeInt32Size(3, int32Type_);
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeInt64Size(4, int64Type_);
+ }
+ if (((bitField0_ & 0x00000010) == 0x00000010)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt32Size(5, uint32Type_);
+ }
+ if (((bitField0_ & 0x00000020) == 0x00000020)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(6, uint64Type_);
+ }
+ if (((bitField0_ & 0x00000040) == 0x00000040)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeSInt32Size(7, sint32Type_);
+ }
+ if (((bitField0_ & 0x00000080) == 0x00000080)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeSInt64Size(8, sint64Type_);
+ }
+ if (((bitField0_ & 0x00000100) == 0x00000100)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeFixed32Size(9, fixed32Type_);
+ }
+ if (((bitField0_ & 0x00000200) == 0x00000200)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeFixed64Size(10, fixed64Type_);
+ }
+ if (((bitField0_ & 0x00000400) == 0x00000400)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeSFixed32Size(11, sfixed32Type_);
+ }
+ if (((bitField0_ & 0x00000800) == 0x00000800)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeSFixed64Size(12, sfixed64Type_);
+ }
+ if (((bitField0_ & 0x00001000) == 0x00001000)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBoolSize(13, boolType_);
+ }
+ if (((bitField0_ & 0x00002000) == 0x00002000)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(14, getStringTypeBytes());
+ }
+ if (((bitField0_ & 0x00004000) == 0x00004000)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(15, bytesType_);
+ }
+ for (int i = 0; i < mapType_.size(); i++) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(16, mapType_.get(i));
+ }
+ {
+ int dataSize = 0;
+ for (int i = 0; i < stringListType_.size(); i++) {
+ dataSize += com.google.protobuf.CodedOutputStream
+ .computeBytesSizeNoTag(stringListType_.getByteString(i));
+ }
+ size += dataSize;
+ size += 2 * getStringListTypeList().size();
+ }
+ if (((bitField0_ & 0x00008000) == 0x00008000)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(18, messageType_);
+ }
+ for (int i = 0; i < messageListType_.size(); i++) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(19, messageListType_.get(i));
+ }
+ if (((bitField0_ & 0x00010000) == 0x00010000)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeEnumSize(20, enumType_.getNumber());
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
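
A minimal round-trip sketch using the generated API shown above (builder setters, parseFrom); it assumes the generated SampleProtos classes from this patch are on the classpath, and the class name below is ours for illustration only:

    import org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes;

    public class AllTypesRoundTrip {
      public static void main(String[] args) throws Exception {
        // Build a message with a few of the optional fields defined above.
        AllTypes msg = AllTypes.newBuilder()
            .setInt32Type(42)
            .setStringType("hello")
            .setBoolType(true)
            .setEnumType(AllTypes.Enum1.VAL1)
            .build();

        // Serialize and parse back; hasXxx() reflects the bitField0_ presence flags.
        byte[] bytes = msg.toByteArray();
        AllTypes parsed = AllTypes.parseFrom(bytes);
        System.out.println(parsed.hasInt32Type() + " " + parsed.getStringType());
      }
    }
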
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code AllTypes}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypesOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_AllTypes_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_AllTypes_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.class, org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getMapTypeFieldBuilder();
+ getMessageTypeFieldBuilder();
+ getMessageListTypeFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ doubleType_ = 0D;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ floatType_ = 0F;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ int32Type_ = 0;
+ bitField0_ = (bitField0_ & ~0x00000004);
+ int64Type_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000008);
+ uint32Type_ = 0;
+ bitField0_ = (bitField0_ & ~0x00000010);
+ uint64Type_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000020);
+ sint32Type_ = 0;
+ bitField0_ = (bitField0_ & ~0x00000040);
+ sint64Type_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000080);
+ fixed32Type_ = 0;
+ bitField0_ = (bitField0_ & ~0x00000100);
+ fixed64Type_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000200);
+ sfixed32Type_ = 0;
+ bitField0_ = (bitField0_ & ~0x00000400);
+ sfixed64Type_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000800);
+ boolType_ = false;
+ bitField0_ = (bitField0_ & ~0x00001000);
+ stringType_ = "";
+ bitField0_ = (bitField0_ & ~0x00002000);
+ bytesType_ = com.google.protobuf.ByteString.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00004000);
+ if (mapTypeBuilder_ == null) {
+ mapType_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00008000);
+ } else {
+ mapTypeBuilder_.clear();
+ }
+ stringListType_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00010000);
+ if (messageTypeBuilder_ == null) {
+ messageType_ = org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.getDefaultInstance();
+ } else {
+ messageTypeBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00020000);
+ if (messageListTypeBuilder_ == null) {
+ messageListType_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00040000);
+ } else {
+ messageListTypeBuilder_.clear();
+ }
+ enumType_ = org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Enum1.VAL1;
+ bitField0_ = (bitField0_ & ~0x00080000);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_AllTypes_descriptor;
+ }
+
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes getDefaultInstanceForType() {
+ return org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes build() {
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes buildPartial() {
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes result = new org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.doubleType_ = doubleType_;
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.floatType_ = floatType_;
+ if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+ to_bitField0_ |= 0x00000004;
+ }
+ result.int32Type_ = int32Type_;
+ if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
+ to_bitField0_ |= 0x00000008;
+ }
+ result.int64Type_ = int64Type_;
+ if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
+ to_bitField0_ |= 0x00000010;
+ }
+ result.uint32Type_ = uint32Type_;
+ if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
+ to_bitField0_ |= 0x00000020;
+ }
+ result.uint64Type_ = uint64Type_;
+ if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
+ to_bitField0_ |= 0x00000040;
+ }
+ result.sint32Type_ = sint32Type_;
+ if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
+ to_bitField0_ |= 0x00000080;
+ }
+ result.sint64Type_ = sint64Type_;
+ if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
+ to_bitField0_ |= 0x00000100;
+ }
+ result.fixed32Type_ = fixed32Type_;
+ if (((from_bitField0_ & 0x00000200) == 0x00000200)) {
+ to_bitField0_ |= 0x00000200;
+ }
+ result.fixed64Type_ = fixed64Type_;
+ if (((from_bitField0_ & 0x00000400) == 0x00000400)) {
+ to_bitField0_ |= 0x00000400;
+ }
+ result.sfixed32Type_ = sfixed32Type_;
+ if (((from_bitField0_ & 0x00000800) == 0x00000800)) {
+ to_bitField0_ |= 0x00000800;
+ }
+ result.sfixed64Type_ = sfixed64Type_;
+ if (((from_bitField0_ & 0x00001000) == 0x00001000)) {
+ to_bitField0_ |= 0x00001000;
+ }
+ result.boolType_ = boolType_;
+ if (((from_bitField0_ & 0x00002000) == 0x00002000)) {
+ to_bitField0_ |= 0x00002000;
+ }
+ result.stringType_ = stringType_;
+ if (((from_bitField0_ & 0x00004000) == 0x00004000)) {
+ to_bitField0_ |= 0x00004000;
+ }
+ result.bytesType_ = bytesType_;
+ if (mapTypeBuilder_ == null) {
+ if (((bitField0_ & 0x00008000) == 0x00008000)) {
+ mapType_ = java.util.Collections.unmodifiableList(mapType_);
+ bitField0_ = (bitField0_ & ~0x00008000);
+ }
+ result.mapType_ = mapType_;
+ } else {
+ result.mapType_ = mapTypeBuilder_.build();
+ }
+ if (((bitField0_ & 0x00010000) == 0x00010000)) {
+ stringListType_ = new com.google.protobuf.UnmodifiableLazyStringList(
+ stringListType_);
+ bitField0_ = (bitField0_ & ~0x00010000);
+ }
+ result.stringListType_ = stringListType_;
+ if (((from_bitField0_ & 0x00020000) == 0x00020000)) {
+ to_bitField0_ |= 0x00008000;
+ }
+ if (messageTypeBuilder_ == null) {
+ result.messageType_ = messageType_;
+ } else {
+ result.messageType_ = messageTypeBuilder_.build();
+ }
+ if (messageListTypeBuilder_ == null) {
+ if (((bitField0_ & 0x00040000) == 0x00040000)) {
+ messageListType_ = java.util.Collections.unmodifiableList(messageListType_);
+ bitField0_ = (bitField0_ & ~0x00040000);
+ }
+ result.messageListType_ = messageListType_;
+ } else {
+ result.messageListType_ = messageListTypeBuilder_.build();
+ }
+ if (((from_bitField0_ & 0x00080000) == 0x00080000)) {
+ to_bitField0_ |= 0x00010000;
+ }
+ result.enumType_ = enumType_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes) {
+ return mergeFrom((org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes other) {
+ if (other == org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.getDefaultInstance()) return this;
+ if (other.hasDoubleType()) {
+ setDoubleType(other.getDoubleType());
+ }
+ if (other.hasFloatType()) {
+ setFloatType(other.getFloatType());
+ }
+ if (other.hasInt32Type()) {
+ setInt32Type(other.getInt32Type());
+ }
+ if (other.hasInt64Type()) {
+ setInt64Type(other.getInt64Type());
+ }
+ if (other.hasUint32Type()) {
+ setUint32Type(other.getUint32Type());
+ }
+ if (other.hasUint64Type()) {
+ setUint64Type(other.getUint64Type());
+ }
+ if (other.hasSint32Type()) {
+ setSint32Type(other.getSint32Type());
+ }
+ if (other.hasSint64Type()) {
+ setSint64Type(other.getSint64Type());
+ }
+ if (other.hasFixed32Type()) {
+ setFixed32Type(other.getFixed32Type());
+ }
+ if (other.hasFixed64Type()) {
+ setFixed64Type(other.getFixed64Type());
+ }
+ if (other.hasSfixed32Type()) {
+ setSfixed32Type(other.getSfixed32Type());
+ }
+ if (other.hasSfixed64Type()) {
+ setSfixed64Type(other.getSfixed64Type());
+ }
+ if (other.hasBoolType()) {
+ setBoolType(other.getBoolType());
+ }
+ if (other.hasStringType()) {
+ bitField0_ |= 0x00002000;
+ stringType_ = other.stringType_;
+ onChanged();
+ }
+ if (other.hasBytesType()) {
+ setBytesType(other.getBytesType());
+ }
+ if (mapTypeBuilder_ == null) {
+ if (!other.mapType_.isEmpty()) {
+ if (mapType_.isEmpty()) {
+ mapType_ = other.mapType_;
+ bitField0_ = (bitField0_ & ~0x00008000);
+ } else {
+ ensureMapTypeIsMutable();
+ mapType_.addAll(other.mapType_);
+ }
+ onChanged();
+ }
+ } else {
+ if (!other.mapType_.isEmpty()) {
+ if (mapTypeBuilder_.isEmpty()) {
+ mapTypeBuilder_.dispose();
+ mapTypeBuilder_ = null;
+ mapType_ = other.mapType_;
+ bitField0_ = (bitField0_ & ~0x00008000);
+ mapTypeBuilder_ =
+ com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
+ getMapTypeFieldBuilder() : null;
+ } else {
+ mapTypeBuilder_.addAllMessages(other.mapType_);
+ }
+ }
+ }
+ if (!other.stringListType_.isEmpty()) {
+ if (stringListType_.isEmpty()) {
+ stringListType_ = other.stringListType_;
+ bitField0_ = (bitField0_ & ~0x00010000);
+ } else {
+ ensureStringListTypeIsMutable();
+ stringListType_.addAll(other.stringListType_);
+ }
+ onChanged();
+ }
+ if (other.hasMessageType()) {
+ mergeMessageType(other.getMessageType());
+ }
+ if (messageListTypeBuilder_ == null) {
+ if (!other.messageListType_.isEmpty()) {
+ if (messageListType_.isEmpty()) {
+ messageListType_ = other.messageListType_;
+ bitField0_ = (bitField0_ & ~0x00040000);
+ } else {
+ ensureMessageListTypeIsMutable();
+ messageListType_.addAll(other.messageListType_);
+ }
+ onChanged();
+ }
+ } else {
+ if (!other.messageListType_.isEmpty()) {
+ if (messageListTypeBuilder_.isEmpty()) {
+ messageListTypeBuilder_.dispose();
+ messageListTypeBuilder_ = null;
+ messageListType_ = other.messageListType_;
+ bitField0_ = (bitField0_ & ~0x00040000);
+ messageListTypeBuilder_ =
+ com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
+ getMessageListTypeFieldBuilder() : null;
+ } else {
+ messageListTypeBuilder_.addAllMessages(other.messageListType_);
+ }
+ }
+ }
+ if (other.hasEnumType()) {
+ setEnumType(other.getEnumType());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // optional double doubleType = 1;
+ private double doubleType_ ;
+ /**
+ * <code>optional double doubleType = 1;</code>
+ */
+ public boolean hasDoubleType() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>optional double doubleType = 1;</code>
+ */
+ public double getDoubleType() {
+ return doubleType_;
+ }
+ /**
+ * <code>optional double doubleType = 1;</code>
+ */
+ public Builder setDoubleType(double value) {
+ bitField0_ |= 0x00000001;
+ doubleType_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional double doubleType = 1;</code>
+ */
+ public Builder clearDoubleType() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ doubleType_ = 0D;
+ onChanged();
+ return this;
+ }
+
+ // optional float floatType = 2;
+ private float floatType_ ;
+ /**
+ * <code>optional float floatType = 2;</code>
+ */
+ public boolean hasFloatType() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * <code>optional float floatType = 2;</code>
+ */
+ public float getFloatType() {
+ return floatType_;
+ }
+ /**
+ * <code>optional float floatType = 2;</code>
+ */
+ public Builder setFloatType(float value) {
+ bitField0_ |= 0x00000002;
+ floatType_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional float floatType = 2;</code>
+ */
+ public Builder clearFloatType() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ floatType_ = 0F;
+ onChanged();
+ return this;
+ }
+
+ // optional int32 int32Type = 3;
+ private int int32Type_ ;
+ /**
+ * <code>optional int32 int32Type = 3;</code>
+ */
+ public boolean hasInt32Type() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * <code>optional int32 int32Type = 3;</code>
+ */
+ public int getInt32Type() {
+ return int32Type_;
+ }
+ /**
+ * <code>optional int32 int32Type = 3;</code>
+ */
+ public Builder setInt32Type(int value) {
+ bitField0_ |= 0x00000004;
+ int32Type_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional int32 int32Type = 3;</code>
+ */
+ public Builder clearInt32Type() {
+ bitField0_ = (bitField0_ & ~0x00000004);
+ int32Type_ = 0;
+ onChanged();
+ return this;
+ }
+
+ // optional int64 int64Type = 4;
+ private long int64Type_ ;
+ /**
+ * <code>optional int64 int64Type = 4;</code>
+ */
+ public boolean hasInt64Type() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ /**
+ * <code>optional int64 int64Type = 4;</code>
+ */
+ public long getInt64Type() {
+ return int64Type_;
+ }
+ /**
+ * <code>optional int64 int64Type = 4;</code>
+ */
+ public Builder setInt64Type(long value) {
+ bitField0_ |= 0x00000008;
+ int64Type_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional int64 int64Type = 4;</code>
+ */
+ public Builder clearInt64Type() {
+ bitField0_ = (bitField0_ & ~0x00000008);
+ int64Type_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // optional uint32 uint32Type = 5;
+ private int uint32Type_ ;
+ /**
+ * <code>optional uint32 uint32Type = 5;</code>
+ */
+ public boolean hasUint32Type() {
+ return ((bitField0_ & 0x00000010) == 0x00000010);
+ }
+ /**
+ * <code>optional uint32 uint32Type = 5;</code>
+ */
+ public int getUint32Type() {
+ return uint32Type_;
+ }
+ /**
+ * <code>optional uint32 uint32Type = 5;</code>
+ */
+ public Builder setUint32Type(int value) {
+ bitField0_ |= 0x00000010;
+ uint32Type_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional uint32 uint32Type = 5;</code>
+ */
+ public Builder clearUint32Type() {
+ bitField0_ = (bitField0_ & ~0x00000010);
+ uint32Type_ = 0;
+ onChanged();
+ return this;
+ }
+
+ // optional uint64 uint64Type = 6;
+ private long uint64Type_ ;
+ /**
+ * <code>optional uint64 uint64Type = 6;</code>
+ */
+ public boolean hasUint64Type() {
+ return ((bitField0_ & 0x00000020) == 0x00000020);
+ }
+ /**
+ * <code>optional uint64 uint64Type = 6;</code>
+ */
+ public long getUint64Type() {
+ return uint64Type_;
+ }
+ /**
+ * <code>optional uint64 uint64Type = 6;</code>
+ */
+ public Builder setUint64Type(long value) {
+ bitField0_ |= 0x00000020;
+ uint64Type_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional uint64 uint64Type = 6;</code>
+ */
+ public Builder clearUint64Type() {
+ bitField0_ = (bitField0_ & ~0x00000020);
+ uint64Type_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // optional sint32 sint32Type = 7;
+ private int sint32Type_ ;
+ /**
+ * <code>optional sint32 sint32Type = 7;</code>
+ */
+ public boolean hasSint32Type() {
+ return ((bitField0_ & 0x00000040) == 0x00000040);
+ }
+ /**
+ * <code>optional sint32 sint32Type = 7;</code>
+ */
+ public int getSint32Type() {
+ return sint32Type_;
+ }
+ /**
+ * <code>optional sint32 sint32Type = 7;</code>
+ */
+ public Builder setSint32Type(int value) {
+ bitField0_ |= 0x00000040;
+ sint32Type_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional sint32 sint32Type = 7;</code>
+ */
+ public Builder clearSint32Type() {
+ bitField0_ = (bitField0_ & ~0x00000040);
+ sint32Type_ = 0;
+ onChanged();
+ return this;
+ }
+
+ // optional sint64 sint64Type = 8;
+ private long sint64Type_ ;
+ /**
+ * <code>optional sint64 sint64Type = 8;</code>
+ */
+ public boolean hasSint64Type() {
+ return ((bitField0_ & 0x00000080) == 0x00000080);
+ }
+ /**
+ * <code>optional sint64 sint64Type = 8;</code>
+ */
+ public long getSint64Type() {
+ return sint64Type_;
+ }
+ /**
+ * <code>optional sint64 sint64Type = 8;</code>
+ */
+ public Builder setSint64Type(long value) {
+ bitField0_ |= 0x00000080;
+ sint64Type_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional sint64 sint64Type = 8;</code>
+ */
+ public Builder clearSint64Type() {
+ bitField0_ = (bitField0_ & ~0x00000080);
+ sint64Type_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // optional fixed32 fixed32Type = 9;
+ private int fixed32Type_ ;
+ /**
+ * <code>optional fixed32 fixed32Type = 9;</code>
+ */
+ public boolean hasFixed32Type() {
+ return ((bitField0_ & 0x00000100) == 0x00000100);
+ }
+ /**
+ * <code>optional fixed32 fixed32Type = 9;</code>
+ */
+ public int getFixed32Type() {
+ return fixed32Type_;
+ }
+ /**
+ * <code>optional fixed32 fixed32Type = 9;</code>
+ */
+ public Builder setFixed32Type(int value) {
+ bitField0_ |= 0x00000100;
+ fixed32Type_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional fixed32 fixed32Type = 9;</code>
+ */
+ public Builder clearFixed32Type() {
+ bitField0_ = (bitField0_ & ~0x00000100);
+ fixed32Type_ = 0;
+ onChanged();
+ return this;
+ }
+
+ // optional fixed64 fixed64Type = 10;
+ private long fixed64Type_ ;
+ /**
+ * <code>optional fixed64 fixed64Type = 10;</code>
+ */
+ public boolean hasFixed64Type() {
+ return ((bitField0_ & 0x00000200) == 0x00000200);
+ }
+ /**
+ * <code>optional fixed64 fixed64Type = 10;</code>
+ */
+ public long getFixed64Type() {
+ return fixed64Type_;
+ }
+ /**
+ * <code>optional fixed64 fixed64Type = 10;</code>
+ */
+ public Builder setFixed64Type(long value) {
+ bitField0_ |= 0x00000200;
+ fixed64Type_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional fixed64 fixed64Type = 10;</code>
+ */
+ public Builder clearFixed64Type() {
+ bitField0_ = (bitField0_ & ~0x00000200);
+ fixed64Type_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // optional sfixed32 sfixed32Type = 11;
+ private int sfixed32Type_ ;
+ /**
+ * <code>optional sfixed32 sfixed32Type = 11;</code>
+ */
+ public boolean hasSfixed32Type() {
+ return ((bitField0_ & 0x00000400) == 0x00000400);
+ }
+ /**
+ * <code>optional sfixed32 sfixed32Type = 11;</code>
+ */
+ public int getSfixed32Type() {
+ return sfixed32Type_;
+ }
+ /**
+ * <code>optional sfixed32 sfixed32Type = 11;</code>
+ */
+ public Builder setSfixed32Type(int value) {
+ bitField0_ |= 0x00000400;
+ sfixed32Type_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional sfixed32 sfixed32Type = 11;</code>
+ */
+ public Builder clearSfixed32Type() {
+ bitField0_ = (bitField0_ & ~0x00000400);
+ sfixed32Type_ = 0;
+ onChanged();
+ return this;
+ }
+
+ // optional sfixed64 sfixed64Type = 12;
+ private long sfixed64Type_ ;
+ /**
+ * <code>optional sfixed64 sfixed64Type = 12;</code>
+ */
+ public boolean hasSfixed64Type() {
+ return ((bitField0_ & 0x00000800) == 0x00000800);
+ }
+ /**
+ * <code>optional sfixed64 sfixed64Type = 12;</code>
+ */
+ public long getSfixed64Type() {
+ return sfixed64Type_;
+ }
+ /**
+ * <code>optional sfixed64 sfixed64Type = 12;</code>
+ */
+ public Builder setSfixed64Type(long value) {
+ bitField0_ |= 0x00000800;
+ sfixed64Type_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional sfixed64 sfixed64Type = 12;</code>
+ */
+ public Builder clearSfixed64Type() {
+ bitField0_ = (bitField0_ & ~0x00000800);
+ sfixed64Type_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // optional bool boolType = 13;
+ private boolean boolType_ ;
+ /**
+ * <code>optional bool boolType = 13;</code>
+ */
+ public boolean hasBoolType() {
+ return ((bitField0_ & 0x00001000) == 0x00001000);
+ }
+ /**
+ * <code>optional bool boolType = 13;</code>
+ */
+ public boolean getBoolType() {
+ return boolType_;
+ }
+ /**
+ * <code>optional bool boolType = 13;</code>
+ */
+ public Builder setBoolType(boolean value) {
+ bitField0_ |= 0x00001000;
+ boolType_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional bool boolType = 13;</code>
+ */
+ public Builder clearBoolType() {
+ bitField0_ = (bitField0_ & ~0x00001000);
+ boolType_ = false;
+ onChanged();
+ return this;
+ }
+
+ // optional string stringType = 14;
+ private java.lang.Object stringType_ = "";
+ /**
+ * <code>optional string stringType = 14;</code>
+ */
+ public boolean hasStringType() {
+ return ((bitField0_ & 0x00002000) == 0x00002000);
+ }
+ /**
+ * <code>optional string stringType = 14;</code>
+ */
+ public java.lang.String getStringType() {
+ java.lang.Object ref = stringType_;
+ if (!(ref instanceof java.lang.String)) {
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ stringType_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * <code>optional string stringType = 14;</code>
+ */
+ public com.google.protobuf.ByteString
+ getStringTypeBytes() {
+ java.lang.Object ref = stringType_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ stringType_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * <code>optional string stringType = 14;</code>
+ */
+ public Builder setStringType(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00002000;
+ stringType_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional string stringType = 14;</code>
+ */
+ public Builder clearStringType() {
+ bitField0_ = (bitField0_ & ~0x00002000);
+ stringType_ = getDefaultInstance().getStringType();
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional string stringType = 14;</code>
+ */
+ public Builder setStringTypeBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00002000;
+ stringType_ = value;
+ onChanged();
+ return this;
+ }
+
+ // optional bytes bytesType = 15;
+ private com.google.protobuf.ByteString bytesType_ = com.google.protobuf.ByteString.EMPTY;
+ /**
+ * <code>optional bytes bytesType = 15;</code>
+ */
+ public boolean hasBytesType() {
+ return ((bitField0_ & 0x00004000) == 0x00004000);
+ }
+ /**
+ * <code>optional bytes bytesType = 15;</code>
+ */
+ public com.google.protobuf.ByteString getBytesType() {
+ return bytesType_;
+ }
+ /**
+ * <code>optional bytes bytesType = 15;</code>
+ */
+ public Builder setBytesType(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00004000;
+ bytesType_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional bytes bytesType = 15;</code>
+ */
+ public Builder clearBytesType() {
+ bitField0_ = (bitField0_ & ~0x00004000);
+ bytesType_ = getDefaultInstance().getBytesType();
+ onChanged();
+ return this;
+ }
+
+ // repeated .MapFieldEntry mapType = 16;
+ private java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry> mapType_ =
+ java.util.Collections.emptyList();
+ private void ensureMapTypeIsMutable() {
+ if (!((bitField0_ & 0x00008000) == 0x00008000)) {
+ mapType_ = new java.util.ArrayList<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry>(mapType_);
+ bitField0_ |= 0x00008000;
+ }
+ }
+
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder> mapTypeBuilder_;
+
+ /**
+ * <code>repeated .MapFieldEntry mapType = 16;</code>
+ */
+ public java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry> getMapTypeList() {
+ if (mapTypeBuilder_ == null) {
+ return java.util.Collections.unmodifiableList(mapType_);
+ } else {
+ return mapTypeBuilder_.getMessageList();
+ }
+ }
+ /**
+ * <code>repeated .MapFieldEntry mapType = 16;</code>
+ */
+ public int getMapTypeCount() {
+ if (mapTypeBuilder_ == null) {
+ return mapType_.size();
+ } else {
+ return mapTypeBuilder_.getCount();
+ }
+ }
+ /**
+ * <code>repeated .MapFieldEntry mapType = 16;</code>
+ */
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry getMapType(int index) {
+ if (mapTypeBuilder_ == null) {
+ return mapType_.get(index);
+ } else {
+ return mapTypeBuilder_.getMessage(index);
+ }
+ }
+ /**
+ * <code>repeated .MapFieldEntry mapType = 16;</code>
+ */
+ public Builder setMapType(
+ int index, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry value) {
+ if (mapTypeBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureMapTypeIsMutable();
+ mapType_.set(index, value);
+ onChanged();
+ } else {
+ mapTypeBuilder_.setMessage(index, value);
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .MapFieldEntry mapType = 16;</code>
+ */
+ public Builder setMapType(
+ int index, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder builderForValue) {
+ if (mapTypeBuilder_ == null) {
+ ensureMapTypeIsMutable();
+ mapType_.set(index, builderForValue.build());
+ onChanged();
+ } else {
+ mapTypeBuilder_.setMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .MapFieldEntry mapType = 16;</code>
+ */
+ public Builder addMapType(org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry value) {
+ if (mapTypeBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureMapTypeIsMutable();
+ mapType_.add(value);
+ onChanged();
+ } else {
+ mapTypeBuilder_.addMessage(value);
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .MapFieldEntry mapType = 16;</code>
+ */
+ public Builder addMapType(
+ int index, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry value) {
+ if (mapTypeBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureMapTypeIsMutable();
+ mapType_.add(index, value);
+ onChanged();
+ } else {
+ mapTypeBuilder_.addMessage(index, value);
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .MapFieldEntry mapType = 16;</code>
+ */
+ public Builder addMapType(
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder builderForValue) {
+ if (mapTypeBuilder_ == null) {
+ ensureMapTypeIsMutable();
+ mapType_.add(builderForValue.build());
+ onChanged();
+ } else {
+ mapTypeBuilder_.addMessage(builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .MapFieldEntry mapType = 16;</code>
+ */
+ public Builder addMapType(
+ int index, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder builderForValue) {
+ if (mapTypeBuilder_ == null) {
+ ensureMapTypeIsMutable();
+ mapType_.add(index, builderForValue.build());
+ onChanged();
+ } else {
+ mapTypeBuilder_.addMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .MapFieldEntry mapType = 16;</code>
+ */
+ public Builder addAllMapType(
+ java.lang.Iterable<? extends org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry> values) {
+ if (mapTypeBuilder_ == null) {
+ ensureMapTypeIsMutable();
+ super.addAll(values, mapType_);
+ onChanged();
+ } else {
+ mapTypeBuilder_.addAllMessages(values);
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .MapFieldEntry mapType = 16;</code>
+ */
+ public Builder clearMapType() {
+ if (mapTypeBuilder_ == null) {
+ mapType_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00008000);
+ onChanged();
+ } else {
+ mapTypeBuilder_.clear();
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .MapFieldEntry mapType = 16;</code>
+ */
+ public Builder removeMapType(int index) {
+ if (mapTypeBuilder_ == null) {
+ ensureMapTypeIsMutable();
+ mapType_.remove(index);
+ onChanged();
+ } else {
+ mapTypeBuilder_.remove(index);
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .MapFieldEntry mapType = 16;</code>
+ */
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder getMapTypeBuilder(
+ int index) {
+ return getMapTypeFieldBuilder().getBuilder(index);
+ }
+ /**
+ * <code>repeated .MapFieldEntry mapType = 16;</code>
+ */
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder getMapTypeOrBuilder(
+ int index) {
+ if (mapTypeBuilder_ == null) {
+ return mapType_.get(index); } else {
+ return mapTypeBuilder_.getMessageOrBuilder(index);
+ }
+ }
+ /**
+ * <code>repeated .MapFieldEntry mapType = 16;</code>
+ */
+ public java.util.List<? extends org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder>
+ getMapTypeOrBuilderList() {
+ if (mapTypeBuilder_ != null) {
+ return mapTypeBuilder_.getMessageOrBuilderList();
+ } else {
+ return java.util.Collections.unmodifiableList(mapType_);
+ }
+ }
+ /**
+ * <code>repeated .MapFieldEntry mapType = 16;</code>
+ */
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder addMapTypeBuilder() {
+ return getMapTypeFieldBuilder().addBuilder(
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.getDefaultInstance());
+ }
+ /**
+ * <code>repeated .MapFieldEntry mapType = 16;</code>
+ */
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder addMapTypeBuilder(
+ int index) {
+ return getMapTypeFieldBuilder().addBuilder(
+ index, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.getDefaultInstance());
+ }
+ /**
+ * <code>repeated .MapFieldEntry mapType = 16;</code>
+ */
+ public java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder>
+ getMapTypeBuilderList() {
+ return getMapTypeFieldBuilder().getBuilderList();
+ }
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder>
+ getMapTypeFieldBuilder() {
+ if (mapTypeBuilder_ == null) {
+ mapTypeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder, org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder>(
+ mapType_,
+ ((bitField0_ & 0x00008000) == 0x00008000),
+ getParentForChildren(),
+ isClean());
+ mapType_ = null;
+ }
+ return mapTypeBuilder_;
+ }
+
+ // repeated string stringListType = 17;
+ private com.google.protobuf.LazyStringList stringListType_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+ private void ensureStringListTypeIsMutable() {
+ if (!((bitField0_ & 0x00010000) == 0x00010000)) {
+ stringListType_ = new com.google.protobuf.LazyStringArrayList(stringListType_);
+ bitField0_ |= 0x00010000;
+ }
+ }
+ /**
+ * <code>repeated string stringListType = 17;</code>
+ */
+ public java.util.List<java.lang.String>
+ getStringListTypeList() {
+ return java.util.Collections.unmodifiableList(stringListType_);
+ }
+ /**
+ * <code>repeated string stringListType = 17;</code>
+ */
+ public int getStringListTypeCount() {
+ return stringListType_.size();
+ }
+ /**
+ * <code>repeated string stringListType = 17;</code>
+ */
+ public java.lang.String getStringListType(int index) {
+ return stringListType_.get(index);
+ }
+ /**
+ * <code>repeated string stringListType = 17;</code>
+ */
+ public com.google.protobuf.ByteString
+ getStringListTypeBytes(int index) {
+ return stringListType_.getByteString(index);
+ }
+ /**
+ * <code>repeated string stringListType = 17;</code>
+ */
+ public Builder setStringListType(
+ int index, java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureStringListTypeIsMutable();
+ stringListType_.set(index, value);
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>repeated string stringListType = 17;</code>
+ */
+ public Builder addStringListType(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureStringListTypeIsMutable();
+ stringListType_.add(value);
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>repeated string stringListType = 17;</code>
+ */
+ public Builder addAllStringListType(
+ java.lang.Iterable<java.lang.String> values) {
+ ensureStringListTypeIsMutable();
+ super.addAll(values, stringListType_);
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>repeated string stringListType = 17;</code>
+ */
+ public Builder clearStringListType() {
+ stringListType_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00010000);
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>repeated string stringListType = 17;</code>
+ */
+ public Builder addStringListTypeBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureStringListTypeIsMutable();
+ stringListType_.add(value);
+ onChanged();
+ return this;
+ }
+
+ // optional .Mesg1 messageType = 18;
+ private org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 messageType_ = org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1, org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder, org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder> messageTypeBuilder_;
+ /**
+ * <code>optional .Mesg1 messageType = 18;</code>
+ */
+ public boolean hasMessageType() {
+ return ((bitField0_ & 0x00020000) == 0x00020000);
+ }
+ /**
+ * <code>optional .Mesg1 messageType = 18;</code>
+ */
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 getMessageType() {
+ if (messageTypeBuilder_ == null) {
+ return messageType_;
+ } else {
+ return messageTypeBuilder_.getMessage();
+ }
+ }
+ /**
+ * <code>optional .Mesg1 messageType = 18;</code>
+ */
+ public Builder setMessageType(org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 value) {
+ if (messageTypeBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ messageType_ = value;
+ onChanged();
+ } else {
+ messageTypeBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00020000;
+ return this;
+ }
+ /**
+ * <code>optional .Mesg1 messageType = 18;</code>
+ */
+ public Builder setMessageType(
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder builderForValue) {
+ if (messageTypeBuilder_ == null) {
+ messageType_ = builderForValue.build();
+ onChanged();
+ } else {
+ messageTypeBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00020000;
+ return this;
+ }
+ /**
+ * <code>optional .Mesg1 messageType = 18;</code>
+ */
+ public Builder mergeMessageType(org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 value) {
+ if (messageTypeBuilder_ == null) {
+ if (((bitField0_ & 0x00020000) == 0x00020000) &&
+ messageType_ != org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.getDefaultInstance()) {
+ messageType_ =
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.newBuilder(messageType_).mergeFrom(value).buildPartial();
+ } else {
+ messageType_ = value;
+ }
+ onChanged();
+ } else {
+ messageTypeBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00020000;
+ return this;
+ }
+ /**
+ * <code>optional .Mesg1 messageType = 18;</code>
+ */
+ public Builder clearMessageType() {
+ if (messageTypeBuilder_ == null) {
+ messageType_ = org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.getDefaultInstance();
+ onChanged();
+ } else {
+ messageTypeBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00020000);
+ return this;
+ }
+ /**
+ * <code>optional .Mesg1 messageType = 18;</code>
+ */
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder getMessageTypeBuilder() {
+ bitField0_ |= 0x00020000;
+ onChanged();
+ return getMessageTypeFieldBuilder().getBuilder();
+ }
+ /**
+ * <code>optional .Mesg1 messageType = 18;</code>
+ */
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder getMessageTypeOrBuilder() {
+ if (messageTypeBuilder_ != null) {
+ return messageTypeBuilder_.getMessageOrBuilder();
+ } else {
+ return messageType_;
+ }
+ }
+ /**
+ * <code>optional .Mesg1 messageType = 18;</code>
+ */
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1, org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder, org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder>
+ getMessageTypeFieldBuilder() {
+ if (messageTypeBuilder_ == null) {
+ messageTypeBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1, org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder, org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder>(
+ messageType_,
+ getParentForChildren(),
+ isClean());
+ messageType_ = null;
+ }
+ return messageTypeBuilder_;
+ }
+
+ // repeated .Mesg1 messageListType = 19;
+ private java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1> messageListType_ =
+ java.util.Collections.emptyList();
+ private void ensureMessageListTypeIsMutable() {
+ if (!((bitField0_ & 0x00040000) == 0x00040000)) {
+ messageListType_ = new java.util.ArrayList<org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1>(messageListType_);
+ bitField0_ |= 0x00040000;
+ }
+ }
+
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1, org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder, org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder> messageListTypeBuilder_;
+
+ /**
+ * <code>repeated .Mesg1 messageListType = 19;</code>
+ */
+ public java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1> getMessageListTypeList() {
+ if (messageListTypeBuilder_ == null) {
+ return java.util.Collections.unmodifiableList(messageListType_);
+ } else {
+ return messageListTypeBuilder_.getMessageList();
+ }
+ }
+ /**
+ * <code>repeated .Mesg1 messageListType = 19;</code>
+ */
+ public int getMessageListTypeCount() {
+ if (messageListTypeBuilder_ == null) {
+ return messageListType_.size();
+ } else {
+ return messageListTypeBuilder_.getCount();
+ }
+ }
+ /**
+ * <code>repeated .Mesg1 messageListType = 19;</code>
+ */
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 getMessageListType(int index) {
+ if (messageListTypeBuilder_ == null) {
+ return messageListType_.get(index);
+ } else {
+ return messageListTypeBuilder_.getMessage(index);
+ }
+ }
+ /**
+ * <code>repeated .Mesg1 messageListType = 19;</code>
+ */
+ public Builder setMessageListType(
+ int index, org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 value) {
+ if (messageListTypeBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureMessageListTypeIsMutable();
+ messageListType_.set(index, value);
+ onChanged();
+ } else {
+ messageListTypeBuilder_.setMessage(index, value);
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .Mesg1 messageListType = 19;</code>
+ */
+ public Builder setMessageListType(
+ int index, org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder builderForValue) {
+ if (messageListTypeBuilder_ == null) {
+ ensureMessageListTypeIsMutable();
+ messageListType_.set(index, builderForValue.build());
+ onChanged();
+ } else {
+ messageListTypeBuilder_.setMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .Mesg1 messageListType = 19;</code>
+ */
+ public Builder addMessageListType(org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 value) {
+ if (messageListTypeBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureMessageListTypeIsMutable();
+ messageListType_.add(value);
+ onChanged();
+ } else {
+ messageListTypeBuilder_.addMessage(value);
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .Mesg1 messageListType = 19;</code>
+ */
+ public Builder addMessageListType(
+ int index, org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 value) {
+ if (messageListTypeBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureMessageListTypeIsMutable();
+ messageListType_.add(index, value);
+ onChanged();
+ } else {
+ messageListTypeBuilder_.addMessage(index, value);
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .Mesg1 messageListType = 19;</code>
+ */
+ public Builder addMessageListType(
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder builderForValue) {
+ if (messageListTypeBuilder_ == null) {
+ ensureMessageListTypeIsMutable();
+ messageListType_.add(builderForValue.build());
+ onChanged();
+ } else {
+ messageListTypeBuilder_.addMessage(builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .Mesg1 messageListType = 19;</code>
+ */
+ public Builder addMessageListType(
+ int index, org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder builderForValue) {
+ if (messageListTypeBuilder_ == null) {
+ ensureMessageListTypeIsMutable();
+ messageListType_.add(index, builderForValue.build());
+ onChanged();
+ } else {
+ messageListTypeBuilder_.addMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .Mesg1 messageListType = 19;</code>
+ */
+ public Builder addAllMessageListType(
+ java.lang.Iterable<? extends org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1> values) {
+ if (messageListTypeBuilder_ == null) {
+ ensureMessageListTypeIsMutable();
+ super.addAll(values, messageListType_);
+ onChanged();
+ } else {
+ messageListTypeBuilder_.addAllMessages(values);
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .Mesg1 messageListType = 19;</code>
+ */
+ public Builder clearMessageListType() {
+ if (messageListTypeBuilder_ == null) {
+ messageListType_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00040000);
+ onChanged();
+ } else {
+ messageListTypeBuilder_.clear();
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .Mesg1 messageListType = 19;</code>
+ */
+ public Builder removeMessageListType(int index) {
+ if (messageListTypeBuilder_ == null) {
+ ensureMessageListTypeIsMutable();
+ messageListType_.remove(index);
+ onChanged();
+ } else {
+ messageListTypeBuilder_.remove(index);
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .Mesg1 messageListType = 19;</code>
+ */
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder getMessageListTypeBuilder(
+ int index) {
+ return getMessageListTypeFieldBuilder().getBuilder(index);
+ }
+ /**
+ * <code>repeated .Mesg1 messageListType = 19;</code>
+ */
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder getMessageListTypeOrBuilder(
+ int index) {
+ if (messageListTypeBuilder_ == null) {
+ return messageListType_.get(index); } else {
+ return messageListTypeBuilder_.getMessageOrBuilder(index);
+ }
+ }
+ /**
+ * <code>repeated .Mesg1 messageListType = 19;</code>
+ */
+ public java.util.List<? extends org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder>
+ getMessageListTypeOrBuilderList() {
+ if (messageListTypeBuilder_ != null) {
+ return messageListTypeBuilder_.getMessageOrBuilderList();
+ } else {
+ return java.util.Collections.unmodifiableList(messageListType_);
+ }
+ }
+ /**
+ * <code>repeated .Mesg1 messageListType = 19;</code>
+ */
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder addMessageListTypeBuilder() {
+ return getMessageListTypeFieldBuilder().addBuilder(
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.getDefaultInstance());
+ }
+ /**
+ * <code>repeated .Mesg1 messageListType = 19;</code>
+ */
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder addMessageListTypeBuilder(
+ int index) {
+ return getMessageListTypeFieldBuilder().addBuilder(
+ index, org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.getDefaultInstance());
+ }
+ /**
+ * <code>repeated .Mesg1 messageListType = 19;</code>
+ */
+ public java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder>
+ getMessageListTypeBuilderList() {
+ return getMessageListTypeFieldBuilder().getBuilderList();
+ }
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1, org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder, org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder>
+ getMessageListTypeFieldBuilder() {
+ if (messageListTypeBuilder_ == null) {
+ messageListTypeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1, org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder, org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder>(
+ messageListType_,
+ ((bitField0_ & 0x00040000) == 0x00040000),
+ getParentForChildren(),
+ isClean());
+ messageListType_ = null;
+ }
+ return messageListTypeBuilder_;
+ }
+
+ // optional .AllTypes.Enum1 enumType = 20;
+ private org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Enum1 enumType_ = org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Enum1.VAL1;
+ /**
+ * <code>optional .AllTypes.Enum1 enumType = 20;</code>
+ */
+ public boolean hasEnumType() {
+ return ((bitField0_ & 0x00080000) == 0x00080000);
+ }
+ /**
+ * <code>optional .AllTypes.Enum1 enumType = 20;</code>
+ */
+ public org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Enum1 getEnumType() {
+ return enumType_;
+ }
+ /**
+ * <code>optional .AllTypes.Enum1 enumType = 20;</code>
+ */
+ public Builder setEnumType(org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Enum1 value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00080000;
+ enumType_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional .AllTypes.Enum1 enumType = 20;</code>
+ */
+ public Builder clearEnumType() {
+ bitField0_ = (bitField0_ & ~0x00080000);
+ enumType_ = org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Enum1.VAL1;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:AllTypes)
+ }
+
+ static {
+ defaultInstance = new AllTypes(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:AllTypes)
+ }
+
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_MapFieldEntry_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_MapFieldEntry_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_Mesg1_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_Mesg1_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_AllTypes_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_AllTypes_fieldAccessorTable;
+
+ public static com.google.protobuf.Descriptors.FileDescriptor
+ getDescriptor() {
+ return descriptor;
+ }
+ private static com.google.protobuf.Descriptors.FileDescriptor
+ descriptor;
+ static {
+ java.lang.String[] descriptorData = {
+ "\n\022SampleProtos.proto\"+\n\rMapFieldEntry\022\013\n" +
+ "\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t\"[\n\005Mesg1\022\"\n\nan" +
+ "otherMap\030\001 \003(\0132\016.MapFieldEntry\022\035\n\005noMap\030" +
+ "\002 \001(\0132\016.MapFieldEntry\022\017\n\007intList\030\003 \003(\005\"\355" +
+ "\003\n\010AllTypes\022\022\n\ndoubleType\030\001 \001(\001\022\021\n\tfloat" +
+ "Type\030\002 \001(\002\022\021\n\tint32Type\030\003 \001(\005\022\021\n\tint64Ty" +
+ "pe\030\004 \001(\003\022\022\n\nuint32Type\030\005 \001(\r\022\022\n\nuint64Ty" +
+ "pe\030\006 \001(\004\022\022\n\nsint32Type\030\007 \001(\021\022\022\n\nsint64Ty" +
+ "pe\030\010 \001(\022\022\023\n\013fixed32Type\030\t \001(\007\022\023\n\013fixed64" +
+ "Type\030\n \001(\006\022\024\n\014sfixed32Type\030\013 \001(\017\022\024\n\014sfix",
+ "ed64Type\030\014 \001(\020\022\020\n\010boolType\030\r \001(\010\022\022\n\nstri" +
+ "ngType\030\016 \001(\t\022\021\n\tbytesType\030\017 \001(\014\022\037\n\007mapTy" +
+ "pe\030\020 \003(\0132\016.MapFieldEntry\022\026\n\016stringListTy" +
+ "pe\030\021 \003(\t\022\033\n\013messageType\030\022 \001(\0132\006.Mesg1\022\037\n" +
+ "\017messageListType\030\023 \003(\0132\006.Mesg1\022!\n\010enumTy" +
+ "pe\030\024 \001(\0162\017.AllTypes.Enum1\"\033\n\005Enum1\022\010\n\004VA" +
+ "L1\020\001\022\010\n\004VAL2\020\002B5\n%org.apache.hadoop.hive" +
+ ".contrib.serde2B\014SampleProtos"
+ };
+ com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+ new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+ public com.google.protobuf.ExtensionRegistry assignDescriptors(
+ com.google.protobuf.Descriptors.FileDescriptor root) {
+ descriptor = root;
+ internal_static_MapFieldEntry_descriptor =
+ getDescriptor().getMessageTypes().get(0);
+ internal_static_MapFieldEntry_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_MapFieldEntry_descriptor,
+ new java.lang.String[] { "Key", "Value", });
+ internal_static_Mesg1_descriptor =
+ getDescriptor().getMessageTypes().get(1);
+ internal_static_Mesg1_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_Mesg1_descriptor,
+ new java.lang.String[] { "AnotherMap", "NoMap", "IntList", });
+ internal_static_AllTypes_descriptor =
+ getDescriptor().getMessageTypes().get(2);
+ internal_static_AllTypes_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_AllTypes_descriptor,
+ new java.lang.String[] { "DoubleType", "FloatType", "Int32Type", "Int64Type", "Uint32Type", "Uint64Type", "Sint32Type", "Sint64Type", "Fixed32Type", "Fixed64Type", "Sfixed32Type", "Sfixed64Type", "BoolType", "StringType", "BytesType", "MapType", "StringListType", "MessageType", "MessageListType", "EnumType", });
+ return null;
+ }
+ };
+ com.google.protobuf.Descriptors.FileDescriptor
+ .internalBuildGeneratedFileFrom(descriptorData,
+ new com.google.protobuf.Descriptors.FileDescriptor[] {
+ }, assigner);
+ }
+
+ // @@protoc_insertion_point(outer_class_scope)
+}
diff --git a/contrib/src/java/org/apache/hadoop/hive/contrib/input/ProtobufMessageInputFormat.java b/contrib/src/java/org/apache/hadoop/hive/contrib/input/ProtobufMessageInputFormat.java
new file mode 100644
index 0000000..45c7b5c
--- /dev/null
+++ b/contrib/src/java/org/apache/hadoop/hive/contrib/input/ProtobufMessageInputFormat.java
@@ -0,0 +1,128 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.contrib.input;
+
+import java.io.EOFException;
+import java.io.IOException;
+import java.lang.reflect.Constructor;
+
+import org.apache.hadoop.mapred.InputSplit;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.RecordReader;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.SequenceFileInputFormat;
+import org.apache.tez.dag.history.logging.proto.ProtoMessageWritable;
+
+import com.google.protobuf.MessageLite;
+import com.google.protobuf.Parser;
+
+/**
+ * InputFormat to support reading ProtoMessageWritable values stored in a sequence file. The
+ * sequence file input format cannot be used directly, because its createValue method relies on a
+ * default constructor, whereas ProtoMessageWritable only has a package-protected constructor that
+ * takes a parser.
+ * By reading the proto class name from the job conf (copied there from the table properties by
+ * Hive), this class provides a generic implementation: setting proto.class in the table
+ * properties is enough to read the files.
+ *
+ * It also ignores EOFException while opening a file, so that zero-byte files in the table are
+ * skipped. Maybe this should be made configurable.
+ *
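+ * For example (illustrative only; the class below is the sample message generated for the tests
+ * in this module), the message class is supplied through the table properties, which Hive copies
+ * into the job conf:
+ * <pre>
+ *   proto.class = org.apache.hadoop.hive.contrib.serde2.SampleProtos$AllTypes
+ * </pre>
+ *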
+ * @param <K> The key type of the sequence file.
+ * @param <V> The proto message type stored in the sequence file. Just to keep the java compiler happy.
+ */
+public class ProtobufMessageInputFormat<K, V extends MessageLite>
+ extends SequenceFileInputFormat<K, ProtoMessageWritable<V>> {
+ private static final String PROTO_CLASS = "proto.class";
+
+ @SuppressWarnings("unchecked")
+ private Parser<V> getParser(String protoClass) throws IOException {
+ if (protoClass == null) {
+ throw new IOException("Please specificy table property: " + PROTO_CLASS);
+ }
+ try {
+ Class<?> clazz = getClass().getClassLoader().loadClass(protoClass);
+ return (Parser<V>)clazz.getField("PARSER").get(null);
+ } catch (ClassNotFoundException | IllegalArgumentException | IllegalAccessException |
+ NoSuchFieldException | SecurityException e) {
+ throw new IOException("Could not load class: " + protoClass, e);
+ }
+ }
+
+ private RecordReader<K, ProtoMessageWritable<V>> getSafeRecordReader(InputSplit split,
+ JobConf job, Reporter reporter) throws IOException {
+ try {
+ return super.getRecordReader(split, job, reporter);
+ } catch (EOFException e) {
+ // Ignore EOFException, we create an empty reader for this, instead of failing.
+ return null;
+ }
+ }
+
+ @Override
+ public RecordReader<K, ProtoMessageWritable<V>> getRecordReader(InputSplit split,
+ JobConf job, Reporter reporter) throws IOException {
+
+ final Parser<V> parser = getParser(job.get(PROTO_CLASS));
+ final RecordReader<K, ProtoMessageWritable<V>> reader = getSafeRecordReader(
+ split, job, reporter);
+ return new RecordReader<K, ProtoMessageWritable<V>>() {
+ // Overload create value, since there is no default constructor for ProtoMessageWritable.
+ @SuppressWarnings("unchecked")
+ @Override
+ public ProtoMessageWritable<V> createValue() {
+ try {
+ @SuppressWarnings("rawtypes")
+ Constructor<ProtoMessageWritable> cons = ProtoMessageWritable.class
+ .getDeclaredConstructor(Parser.class);
+ cons.setAccessible(true);
+ return cons.newInstance(parser);
+ } catch (Exception e) {
+ throw new RuntimeException("Unexpected error: ", e);
+ }
+ }
+
+ @Override
+ public K createKey() {
+ return reader != null ? reader.createKey() : null;
+ }
+
+ @Override
+ public void close() throws IOException {
+ if (reader != null) {
+ reader.close();
+ }
+ }
+
+ @Override
+ public long getPos() throws IOException {
+ return reader != null ? reader.getPos() : 0;
+ }
+
+ @Override
+ public float getProgress() throws IOException {
+ return reader != null ? reader.getProgress() : 1.0f;
+ }
+
+ @Override
+ public boolean next(K arg0, ProtoMessageWritable<V> arg1) throws IOException {
+ return reader != null ? reader.next(arg0, arg1) : false;
+ }
+ };
+ }
+}
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/metatool/package-info.java b/contrib/src/java/org/apache/hadoop/hive/contrib/input/package-info.java
similarity index 85%
copy from standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/metatool/package-info.java
copy to contrib/src/java/org/apache/hadoop/hive/contrib/input/package-info.java
index dd2aacd..e70d245 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/metatool/package-info.java
+++ b/contrib/src/java/org/apache/hadoop/hive/contrib/input/package-info.java
@@ -17,7 +17,7 @@
*/
/**
- * This package contains the HiveMetaTool program and the classes used by it.
+ * This package contains contributed input formats.
*/
-package org.apache.hadoop.hive.metastore.tool.metatool;
+package org.apache.hadoop.hive.contrib.input;
diff --git a/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/ProtobufBytesWritableSerDe.java b/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/ProtobufBytesWritableSerDe.java
new file mode 100644
index 0000000..d6c18ee
--- /dev/null
+++ b/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/ProtobufBytesWritableSerDe.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.contrib.serde2;
+
+import java.util.Properties;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.Writable;
+
+import com.google.protobuf.InvalidProtocolBufferException;
+import com.google.protobuf.Message;
+import com.google.protobuf.Parser;
+
+/**
+ * Class to convert a BytesWritable containing a protobuf message to Hive formats.
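+ * It can be used, for example, when the data files are SequenceFiles whose values are
+ * BytesWritable objects holding the raw serialized message bytes; each value is parsed with the
+ * PARSER of the class named by the proto.class property.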
+ * @see ProtobufSerDe
+ */
+public class ProtobufBytesWritableSerDe extends ProtobufSerDe {
+ private Parser<? extends Message> parser;
+
+ @Override
+ public void initialize(Configuration conf, Properties tbl) throws SerDeException {
+ super.initialize(conf, tbl);
+ try {
+ @SuppressWarnings("unchecked")
+ Parser<? extends Message> tmpParser = (Parser<? extends Message>)protoMessageClass
+ .getField("PARSER").get(null);
+ this.parser = tmpParser;
+ } catch (IllegalArgumentException | IllegalAccessException | NoSuchFieldException
+ | SecurityException e) {
+ throw new SerDeException("Unable get PARSER from class: " + protoMessageClass.getName(), e);
+ }
+ }
+
+ @Override
+ protected Message toMessage(Writable writable) throws SerDeException {
+ try {
+ BytesWritable bytes = (BytesWritable)writable;
+ return parser.parseFrom(bytes.getBytes(), 0, bytes.getLength());
+ } catch (InvalidProtocolBufferException e) {
+ throw new SerDeException("Unable to parse proto message", e);
+ }
+ }
+}
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/metatool/package-info.java b/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/ProtobufMessageSerDe.java
similarity index 62%
copy from standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/metatool/package-info.java
copy to contrib/src/java/org/apache/hadoop/hive/contrib/serde2/ProtobufMessageSerDe.java
index dd2aacd..d584f78 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/metatool/package-info.java
+++ b/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/ProtobufMessageSerDe.java
@@ -16,8 +16,22 @@
* limitations under the License.
*/
+package org.apache.hadoop.hive.contrib.serde2;
+
+import org.apache.hadoop.io.Writable;
+import org.apache.tez.dag.history.logging.proto.ProtoMessageWritable;
+
+import com.google.protobuf.Message;
+
/**
- * This package contains the HiveMetaTool program and the classes used by it.
+ * Class to convert a ProtoMessageWritable to Hive formats.
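+ * Typically paired with org.apache.hadoop.hive.contrib.input.ProtobufMessageInputFormat, which
+ * produces the ProtoMessageWritable values this SerDe consumes.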
+ * @see ProtobufSerDe
*/
-package org.apache.hadoop.hive.metastore.tool.metatool;
+public class ProtobufMessageSerDe extends ProtobufSerDe {
+
+ @SuppressWarnings("unchecked")
+ protected Message toMessage(Writable writable) {
+ return ((ProtoMessageWritable<Message>)writable).getMessage();
+ }
+}
diff --git a/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/ProtobufSerDe.java b/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/ProtobufSerDe.java
new file mode 100644
index 0000000..0b7f721
--- /dev/null
+++ b/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/ProtobufSerDe.java
@@ -0,0 +1,377 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.contrib.serde2;
+
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
+import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeStats;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.io.Writable;
+import org.apache.tez.dag.history.logging.proto.ProtoMessageWritable;
+
+import com.google.common.collect.Sets;
+import com.google.protobuf.ByteString;
+import com.google.protobuf.Descriptors.Descriptor;
+import com.google.protobuf.Descriptors.EnumValueDescriptor;
+import com.google.protobuf.Descriptors.FieldDescriptor;
+import com.google.protobuf.Descriptors.FieldDescriptor.JavaType;
+import com.google.protobuf.Message;
+
+/**
+ * SerDe to convert protobuf messages wrapped in a Writable to Hive formats.
+ * The SerDe supports the following properties (an illustrative table definition follows the list):
+ * <ul>
+ * <li>proto.class: Required; specifies the generated message class used to read the messages.</li>
+ * <li>proto.maptypes: Optional; declares the set of protobuf types that should be converted to
+ *     map objects instead of struct type. It applies only to repeated struct fields, and each
+ *     listed message type must have exactly 2 fields: the first is used as the key and the
+ *     second as the value.
+ * </li>
+ * </ul>
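+ *
+ * A minimal illustrative table definition (the table name and location are hypothetical, and
+ * depending on the Hive version the properties may also have to be repeated in TBLPROPERTIES so
+ * that the input format can see them in the job conf):
+ * <pre>
+ *   CREATE EXTERNAL TABLE sample_all_types
+ *   ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.ProtobufMessageSerDe'
+ *   WITH SERDEPROPERTIES (
+ *     'proto.class' = 'org.apache.hadoop.hive.contrib.serde2.SampleProtos$AllTypes',
+ *     'proto.maptypes' = 'MapFieldEntry')
+ *   STORED AS
+ *     INPUTFORMAT 'org.apache.hadoop.hive.contrib.input.ProtobufMessageInputFormat'
+ *     OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat'
+ *   LOCATION '/path/to/proto/files';
+ * </pre>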
+ */
+public abstract class ProtobufSerDe extends AbstractSerDe {
+ static final String PROTO_CLASS = "proto.class";
+ static final String MAP_TYPES = "proto.maptypes";
+
+ protected Class<? extends Message> protoMessageClass;
+ private ProtoToHiveConvertor convertor;
+ private ObjectInspector objectInspector;
+ private Set<String> mapTypes;
+
+ @Override
+ public void initialize(Configuration conf, Properties tbl) throws SerDeException {
+ this.mapTypes = Sets.newHashSet(tbl.getProperty(MAP_TYPES, "").trim().split("\\s*,\\s*"));
+
+ protoMessageClass = loadClass(tbl.getProperty(PROTO_CLASS));
+ Descriptor descriptor = loadDescriptor(protoMessageClass);
+
+ Map<Descriptor, ObjectInspector> cache = new HashMap<>();
+ this.objectInspector = createStructObjectInspector(descriptor, cache);
+
+ Map<Descriptor, ProtoToHiveConvertor> convertorCache = new HashMap<>();
+ this.convertor = createConvertor(descriptor, convertorCache);
+ }
+
+ private Class<? extends Message> loadClass(String protoClass) throws SerDeException {
+ if (protoClass == null) {
+ throw new SerDeException(PROTO_CLASS + " has to be set.");
+ }
+ try {
+ Class<?> clazz = getClass().getClassLoader().loadClass(protoClass);
+ if (!Message.class.isAssignableFrom(clazz)) {
+ throw new SerDeException("Invalid class: " + clazz.getName() + " is not type of: " +
+ Message.class.getName());
+ }
+ @SuppressWarnings("unchecked")
+ Class<? extends Message> serdeClass = (Class<? extends Message>) clazz;
+ return serdeClass;
+ } catch (ClassNotFoundException e) {
+ throw new SerDeException("Cannot find/load class: " + protoClass, e);
+ }
+ }
+
+ private static Descriptor loadDescriptor(Class<? extends Message> protoClass)
+ throws SerDeException {
+ try {
+ Method method = protoClass.getMethod("getDescriptor", (Class<?>[])null);
+ return (Descriptor)method.invoke(null, (Object[])null);
+ } catch (InvocationTargetException | NoSuchMethodException | SecurityException |
+ IllegalAccessException | IllegalArgumentException e) {
+ throw new SerDeException("Error trying to get descriptor for class: " + protoClass.getName(),
+ e);
+ }
+ }
+
+ @Override
+ public Class<? extends Writable> getSerializedClass() {
+ return ProtoMessageWritable.class;
+ }
+
+ @Override
+ public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
+ throw new UnsupportedOperationException("Not implemented serialize");
+ }
+
+ @Override
+ public SerDeStats getSerDeStats() {
+ return null;
+ }
+
+ @Override
+ public Object deserialize(Writable blob) throws SerDeException {
+ if (blob == null) {
+ return null;
+ }
+ Message message = toMessage(blob);
+ if (message == null) {
+ return null;
+ }
+ return convertor.convert(message);
+ }
+
+ /**
+ * Convert the given writable to a message.
+ * @param writable The writable object containing the message.
+ * @return The converted message object.
+ * @throws SerDeException
+ */
+ protected abstract Message toMessage(Writable writable) throws SerDeException;
+
+ @Override
+ public ObjectInspector getObjectInspector() throws SerDeException {
+ return objectInspector;
+ }
+
+ private ObjectInspector createStructObjectInspector(Descriptor descriptor,
+ Map<Descriptor, ObjectInspector> cache) throws SerDeException {
+ if (cache.containsKey(descriptor)) {
+ return cache.get(descriptor);
+ }
+ List<String> columnNames = new ArrayList<>();
+ List<ObjectInspector> columnOI = new ArrayList<>();
+ for (FieldDescriptor field : descriptor.getFields()) {
+ columnNames.add(field.getName());
+ columnOI.add(createObjectInspector(field, cache));
+ }
+ ObjectInspector oi = ObjectInspectorFactory.getStandardStructObjectInspector(
+ columnNames, columnOI);
+ cache.put(descriptor, oi);
+ return oi;
+ }
+
+ private ObjectInspector createObjectInspector(FieldDescriptor descriptor,
+ Map<Descriptor, ObjectInspector> cache) throws SerDeException {
+ ObjectInspector oi;
+ switch(descriptor.getJavaType()) {
+ case BOOLEAN:
+ oi = getPrimitive(PrimitiveCategory.BOOLEAN);
+ break;
+ case BYTE_STRING:
+ oi = getPrimitive(PrimitiveCategory.BINARY);
+ break;
+ case DOUBLE:
+ oi = getPrimitive(PrimitiveCategory.DOUBLE);
+ break;
+ case ENUM:
+ oi = getPrimitive(PrimitiveCategory.STRING);
+ break;
+ case FLOAT:
+ oi = getPrimitive(PrimitiveCategory.FLOAT);
+ break;
+ case INT:
+ oi = getPrimitive(PrimitiveCategory.INT);
+ break;
+ case LONG:
+ oi = getPrimitive(PrimitiveCategory.LONG);
+ break;
+ case STRING:
+ oi = getPrimitive(PrimitiveCategory.STRING);
+ break;
+ case MESSAGE:
+ Descriptor msgType = descriptor.getMessageType();
+ if (descriptor.isRepeated() && mapTypes.contains(msgType.getFullName())) {
+ return getMapObjectInspector(msgType, cache);
+ } else {
+ oi = createStructObjectInspector(msgType, cache);
+ }
+ break;
+ default:
+ throw new IllegalArgumentException("unexpected type: " + descriptor.getJavaType());
+ }
+ return descriptor.isRepeated() ? ObjectInspectorFactory.getStandardListObjectInspector(oi) : oi;
+ }
+
+ private ObjectInspector getMapObjectInspector(Descriptor descriptor,
+ Map<Descriptor, ObjectInspector> cache) throws SerDeException {
+ List<FieldDescriptor> fields = descriptor.getFields();
+ if (fields.size() != 2) {
+ throw new SerDeException("Map type " + descriptor.getFullName() +
+ " should have only 2 fields, got: " + fields.size());
+ }
+ ObjectInspector keyOI = createObjectInspector(fields.get(0), cache);
+ ObjectInspector valueOI = createObjectInspector(fields.get(1), cache);
+ return ObjectInspectorFactory.getStandardMapObjectInspector(keyOI, valueOI);
+ }
+
+ private static ObjectInspector getPrimitive(PrimitiveCategory cat) {
+ return PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(cat);
+ }
+
+ private ProtoToHiveConvertor createConvertor(Descriptor descriptor,
+ Map<Descriptor, ProtoToHiveConvertor> cache) throws SerDeException {
+ if (cache.containsKey(descriptor)) {
+ return cache.get(descriptor);
+ }
+ List<FieldDescriptor> fields = descriptor.getFields();
+ StructConvertor scConvertor = new StructConvertor(fields.size());
+ int i = 0;
+ for (FieldDescriptor field : descriptor.getFields()) {
+ ProtoToHiveConvertor fc;
+ if (field.getJavaType() == JavaType.MESSAGE) {
+ fc = createConvertor(field.getMessageType(), cache);
+ } else if (field.getJavaType() == JavaType.BYTE_STRING) {
+ fc = ByteStringConvertor.INSTANCE;
+ } else if (field.getJavaType() == JavaType.ENUM) {
+ fc = EnumConvertor.INSTANCE;
+ } else {
+ fc = IdentityConvertor.INSTANCE;
+ }
+ if (field.isRepeated()) {
+ if (field.getJavaType() == JavaType.MESSAGE &&
+ mapTypes.contains(field.getMessageType().getFullName())) {
+ if (field.getMessageType().getFields().size() != 2) {
+ throw new SerDeException("Expected exactly 2 fields for: " +
+ field.getMessageType().getFullName());
+ }
+ fc = new MapConvertor(fc);
+ } else {
+ fc = new ListConvertor(fc);
+ }
+ }
+ scConvertor.add(i++, field, fc);
+ }
+ cache.put(descriptor, scConvertor);
+ return scConvertor;
+ }
+
+ private interface ProtoToHiveConvertor {
+ default Object extractAndConvert(FieldDescriptor field, Message msg) {
+ Object val = msg.hasField(field) ? msg.getField(field) : null;
+ return val == null ? null : convert(val);
+ }
+
+ Object convert(Object obj);
+ }
+
+ private static class StructConvertor implements ProtoToHiveConvertor {
+ private final FieldDescriptor[] fields;
+ private final ProtoToHiveConvertor[] convertors;
+
+ StructConvertor(int size) {
+ this.fields = new FieldDescriptor[size];
+ this.convertors = new ProtoToHiveConvertor[size];
+ }
+
+ void add(int i, FieldDescriptor field, ProtoToHiveConvertor convertor) {
+ fields[i] = field;
+ convertors[i] = convertor;
+ }
+
+ @Override
+ public Object convert(Object obj) {
+ Message msg = (Message)obj;
+ Object[] ret = new Object[fields.length];
+ for (int i = 0; i < fields.length; ++i) {
+ ret[i] = convertors[i].extractAndConvert(fields[i], msg);
+ }
+ return ret;
+ }
+ }
+
+ private static class ListConvertor implements ProtoToHiveConvertor {
+ private final ProtoToHiveConvertor convertor;
+
+ ListConvertor(ProtoToHiveConvertor convertor) {
+ this.convertor = convertor;
+ }
+
+ @Override
+ public Object extractAndConvert(FieldDescriptor field, Message msg) {
+ int count = msg.getRepeatedFieldCount(field);
+ if (count == 0) {
+ return null;
+ }
+ Object[] val = new Object[count];
+ for (int j = 0; j < count; ++j) {
+ val[j] = convertor.convert(msg.getRepeatedField(field, j));
+ }
+ return val;
+ }
+
+ @Override
+ public Object convert(Object obj) {
+ throw new UnsupportedOperationException("Use extractAndConvert for ListConvertor");
+ }
+ }
+
+ private static class MapConvertor implements ProtoToHiveConvertor {
+ private final ProtoToHiveConvertor convertor;
+
+ MapConvertor(ProtoToHiveConvertor convertor) {
+ this.convertor = convertor;
+ }
+
+ @Override
+ public Object extractAndConvert(FieldDescriptor field, Message msg) {
+ int count = msg.getRepeatedFieldCount(field);
+ if (count == 0) {
+ return null;
+ }
+ Map<Object, Object> val = new HashMap<>(count);
+ for (int j = 0; j < count; ++j) {
+ Object[] entry = (Object[])convertor.convert(msg.getRepeatedField(field, j));
+ val.put(entry[0], entry[1]);
+ }
+ return val;
+ }
+
+ @Override
+ public Object convert(Object obj) {
+ throw new UnsupportedOperationException("Use extractAndConvert for MapConvertor");
+ }
+ }
+
+ private static class ByteStringConvertor implements ProtoToHiveConvertor {
+ private static final ProtoToHiveConvertor INSTANCE = new ByteStringConvertor();
+
+ @Override
+ public Object convert(Object obj) {
+ return ((ByteString)obj).toByteArray();
+ }
+ }
+
+ private static class EnumConvertor implements ProtoToHiveConvertor {
+ private static final ProtoToHiveConvertor INSTANCE = new EnumConvertor();
+ @Override
+ public Object convert(Object obj) {
+ return ((EnumValueDescriptor)obj).getName();
+ }
+ }
+
+ private static class IdentityConvertor implements ProtoToHiveConvertor {
+ private static final ProtoToHiveConvertor INSTANCE = new IdentityConvertor();
+
+ @Override
+ public Object convert(Object obj) {
+ return obj;
+ }
+ }
+}
diff --git a/contrib/src/protobuf-test/SampleProtos.proto b/contrib/src/protobuf-test/SampleProtos.proto
new file mode 100644
index 0000000..c7d0453
--- /dev/null
+++ b/contrib/src/protobuf-test/SampleProtos.proto
@@ -0,0 +1,57 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+option java_package = "org.apache.hadoop.hive.contrib.serde2";
+option java_outer_classname = "SampleProtos";
+
+message MapFieldEntry {
+ optional string key = 1;
+ optional string value = 2;
+}
+
+message Mesg1 {
+ repeated MapFieldEntry anotherMap = 1;
+ optional MapFieldEntry noMap = 2;
+ repeated int32 intList = 3;
+}
+
+message AllTypes {
+ optional double doubleType = 1;
+ optional float floatType = 2;
+ optional int32 int32Type = 3;
+ optional int64 int64Type = 4;
+ optional uint32 uint32Type = 5;
+ optional uint64 uint64Type = 6;
+ optional sint32 sint32Type = 7;
+ optional sint64 sint64Type = 8;
+ optional fixed32 fixed32Type = 9;
+ optional fixed64 fixed64Type = 10;
+ optional sfixed32 sfixed32Type = 11;
+ optional sfixed64 sfixed64Type = 12;
+ optional bool boolType = 13;
+ optional string stringType = 14;
+ optional bytes bytesType = 15;
+ repeated MapFieldEntry mapType = 16;
+ repeated string stringListType = 17;
+ optional Mesg1 messageType = 18;
+ repeated Mesg1 messageListType = 19;
+ enum Enum1 {
+ VAL1 = 1;
+ VAL2 = 2;
+ };
+ optional Enum1 enumType = 20;
+}
diff --git a/contrib/src/test/org/apache/hadoop/hive/contrib/serde2/TestProtoMessageSerDe.java b/contrib/src/test/org/apache/hadoop/hive/contrib/serde2/TestProtoMessageSerDe.java
new file mode 100644
index 0000000..4f31f10
--- /dev/null
+++ b/contrib/src/test/org/apache/hadoop/hive/contrib/serde2/TestProtoMessageSerDe.java
@@ -0,0 +1,247 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.contrib.serde2;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.lang.reflect.Constructor;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes;
+import org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Enum1;
+import org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry;
+import org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.tez.dag.history.logging.proto.ProtoMessageWritable;
+import org.junit.Test;
+
+import com.google.protobuf.ByteString;
+import com.google.protobuf.Message;
+import com.google.protobuf.Parser;
+
+/**
+ * Test class for ProtobufSerDe.
+ */
+public class TestProtoMessageSerDe {
+ private static ObjectInspector stroi = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
+ private static ObjectInspector intoi = PrimitiveObjectInspectorFactory.javaIntObjectInspector;
+ private static ObjectInspector strmapoi = ObjectInspectorFactory.getStandardMapObjectInspector(
+ stroi, stroi);
+ private static ObjectInspector mfoi = structoi(list("key", "value"), list(stroi, stroi));
+ private static ObjectInspector m1oi = structoi(list("anotherMap", "noMap", "intList"),
+ list(strmapoi, mfoi, listoi(intoi)));
+
+ private ProtobufSerDe serde;
+
+ private Configuration conf = new Configuration(false);
+ @SuppressWarnings("unchecked")
+ private <T extends Message> ProtoMessageWritable<T> init(Class<T> clazz, String mapTypes)
+ throws Exception {
+ serde = new ProtobufMessageSerDe();
+ Properties tbl = new Properties();
+ tbl.setProperty(ProtobufSerDe.PROTO_CLASS, clazz.getName());
+ tbl.setProperty(ProtobufSerDe.MAP_TYPES, mapTypes);
+ serde.initialize(conf, tbl);
+
+ @SuppressWarnings("rawtypes")
+ Constructor<ProtoMessageWritable> cons = ProtoMessageWritable.class.getDeclaredConstructor(
+ Parser.class);
+ cons.setAccessible(true);
+ return cons.newInstance((Parser<T>)clazz.getField("PARSER").get(null));
+ }
+
+ private MapFieldEntry makeMap(int i) {
+ return MapFieldEntry.newBuilder().setKey("key" + i).setValue("val" + i).build();
+ }
+
+ private Mesg1 makeMesg1(int start) {
+ return Mesg1.newBuilder()
+ .addAnotherMap(makeMap(start + 1)).addAnotherMap(makeMap(start + 2))
+ .setNoMap(makeMap(start + 3))
+ .addIntList(start + 4).addIntList(start + 5).build();
+ }
+
+ @Test
+ public void testSimpleMessage() throws Exception {
+ ProtoMessageWritable<MapFieldEntry> writable = init(MapFieldEntry.class,
+ " MapFieldEntry , Invalid ");
+ assertEquals(mfoi, serde.getObjectInspector());
+
+ writable.setMessage(MapFieldEntry.getDefaultInstance());
+ assertArrayEquals(arr(null, null), (Object[])serde.deserialize(writable));
+
+ MapFieldEntry proto = makeMap(1);
+ writable.setMessage(proto);
+ Object obj = serde.deserialize(writable);
+ assertTrue(obj instanceof Object[]);
+ assertArrayEquals(arr(proto.getKey(), proto.getValue()), (Object[])obj);
+ }
+
+ @Test
+ public void testMapAndList() throws Exception {
+ ProtoMessageWritable<Mesg1> writable = init(Mesg1.class, "MapFieldEntry,Invalid");
+ assertEquals(m1oi, serde.getObjectInspector());
+
+ writable.setMessage(Mesg1.getDefaultInstance());
+ assertArrayEquals(arr(null, null, null), (Object[])serde.deserialize(writable));
+
+ Mesg1 proto = makeMesg1(0);
+ writable.setMessage(proto);
+ assertArrayEquals(arr(map("key1", "val1", "key2", "val2"), arr("key3", "val3"), arr(4, 5)),
+ (Object[])serde.deserialize(writable));
+ }
+
+ @Test
+ public void testMapAndListNoMapConfigured() throws Exception {
+ ProtoMessageWritable<Mesg1> writable = init(Mesg1.class, "");
+ assertEquals(structoi(list("anotherMap", "noMap", "intList"),
+ list(listoi(mfoi), mfoi, listoi(intoi))), serde.getObjectInspector());
+
+ writable.setMessage(Mesg1.getDefaultInstance());
+ assertArrayEquals(arr(null, null, null), (Object[])serde.deserialize(writable));
+
+ Mesg1 proto = makeMesg1(0);
+ writable.setMessage(proto);
+ assertArrayEquals(arr(arr(arr("key1", "val1"), arr("key2", "val2")), arr("key3", "val3"),
+ arr(4, 5)), (Object[])serde.deserialize(writable));
+ }
+
+ @Test
+ public void testAll() throws Exception {
+ ProtoMessageWritable<AllTypes> writable = init(AllTypes.class, "MapFieldEntry");
+ ObjectInspector oi = structoi(
+ list("doubleType", "floatType", "int32Type", "int64Type", "uint32Type", "uint64Type",
+ "sint32Type", "sint64Type", "fixed32Type", "fixed64Type", "sfixed32Type",
+ "sfixed64Type", "boolType", "stringType", "bytesType", "mapType", "stringListType",
+ "messageType", "messageListType", "enumType"),
+ list(PrimitiveObjectInspectorFactory.javaDoubleObjectInspector,
+ PrimitiveObjectInspectorFactory.javaFloatObjectInspector,
+ PrimitiveObjectInspectorFactory.javaIntObjectInspector,
+ PrimitiveObjectInspectorFactory.javaLongObjectInspector,
+ PrimitiveObjectInspectorFactory.javaIntObjectInspector,
+ PrimitiveObjectInspectorFactory.javaLongObjectInspector,
+ PrimitiveObjectInspectorFactory.javaIntObjectInspector,
+ PrimitiveObjectInspectorFactory.javaLongObjectInspector,
+ PrimitiveObjectInspectorFactory.javaIntObjectInspector,
+ PrimitiveObjectInspectorFactory.javaLongObjectInspector,
+ PrimitiveObjectInspectorFactory.javaIntObjectInspector,
+ PrimitiveObjectInspectorFactory.javaLongObjectInspector,
+ PrimitiveObjectInspectorFactory.javaBooleanObjectInspector,
+ PrimitiveObjectInspectorFactory.javaStringObjectInspector,
+ PrimitiveObjectInspectorFactory.javaByteArrayObjectInspector,
+ strmapoi,
+ listoi(stroi),
+ m1oi,
+ listoi(m1oi),
+ stroi)
+ );
+ assertEquals(oi, serde.getObjectInspector());
+
+ writable.setMessage(AllTypes.getDefaultInstance());
+ assertArrayEquals(arr(null, null, null, null, null, null, null, null, null, null, null, null,
+ null, null, null, null, null, null, null, null), (Object[])serde.deserialize(writable));
+
+ AllTypes proto = AllTypes.newBuilder()
+ .setDoubleType(1.0)
+ .setFloatType(2.0f)
+ .setInt32Type(3)
+ .setInt64Type(4)
+ .setUint32Type(5)
+ .setUint64Type(6)
+ .setSint32Type(7)
+ .setSint64Type(8)
+ .setFixed32Type(9)
+ .setFixed64Type(10)
+ .setSfixed32Type(11)
+ .setSfixed64Type(12)
+ .setBoolType(true)
+ .setStringType("val13")
+ .setBytesType(ByteString.copyFrom(new byte[] {14, 15}))
+ .addMapType(makeMap(16))
+ .addMapType(makeMap(17))
+ .addStringListType("val18")
+ .addStringListType("val19")
+ .setMessageType(makeMesg1(19))
+ .addMessageListType(makeMesg1(24))
+ .addMessageListType(makeMesg1(29))
+ .setEnumType(Enum1.VAL1)
+ .build();
+ writable.setMessage(proto);
+ assertArrayEquals(arr(1.0d, 2.0f, 3, 4L, 5, 6L, 7, 8L, 9, 10L, 11, 12L, true, "val13",
+ new byte[] {14, 15}, map("key16", "val16", "key17", "val17"), arr("val18", "val19"),
+ arr(map("key20", "val20", "key21", "val21"), arr("key22", "val22"), arr(23, 24)),
+ arr(arr(map("key25", "val25", "key26", "val26"), arr("key27", "val27"), arr(28, 29)),
+ arr(map("key30", "val30", "key31", "val31"), arr("key32", "val32"), arr(33, 34))), "VAL1"),
+ (Object[])serde.deserialize(writable));
+ }
+
+ @Test
+ public void testBytesWritable() throws Exception {
+ serde = new ProtobufBytesWritableSerDe();
+ Properties tbl = new Properties();
+ tbl.setProperty(ProtobufSerDe.PROTO_CLASS, MapFieldEntry.class.getName());
+ tbl.setProperty(ProtobufSerDe.MAP_TYPES, "MapFieldEntry");
+ serde.initialize(conf, tbl);
+ assertEquals(mfoi, serde.getObjectInspector());
+
+ BytesWritable writable = new BytesWritable(MapFieldEntry.getDefaultInstance().toByteArray());
+ assertArrayEquals(arr(null, null), (Object[])serde.deserialize(writable));
+
+ MapFieldEntry proto = makeMap(1);
+ writable = new BytesWritable(proto.toByteArray());
+ Object obj = serde.deserialize(writable);
+ assertTrue(obj instanceof Object[]);
+ assertArrayEquals(arr(proto.getKey(), proto.getValue()), (Object[])obj);
+ }
+
+ private static ObjectInspector structoi(List<String> names, List<ObjectInspector> ois) {
+ return ObjectInspectorFactory.getStandardStructObjectInspector(names, ois);
+ }
+
+ private static ObjectInspector listoi(ObjectInspector oi) {
+ return ObjectInspectorFactory.getStandardListObjectInspector(oi);
+ }
+
+ @SafeVarargs
+ private static <T> List<T> list(T ... ts) {
+ return Arrays.asList(ts);
+ }
+
+ private static Map<String, String> map(String ... s) {
+ Map<String, String> ret = new HashMap<>();
+ for (int i = 0; i < s.length; i += 2) {
+ ret.put(s[i], s[i + 1]);
+ }
+ return ret;
+ }
+
+ private static Object[] arr(Object ... objs) {
+ return objs;
+ }
+}
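[Editor's note, not part of the patch: as a standalone view of the serde's API surface outside JUnit, the sketch below mirrors testBytesWritable above. It uses only calls that appear in the test (initialize with Configuration and Properties, deserialize of a BytesWritable, getObjectInspector); the driver class itself is hypothetical, and the table property constants are referenced symbolically because their string values are defined in ProtobufSerDe rather than in this hunk.]

// Illustrative sketch only -- mirrors TestProtoMessageSerDe.testBytesWritable above.
import java.util.Arrays;
import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.contrib.serde2.ProtobufBytesWritableSerDe;
import org.apache.hadoop.hive.contrib.serde2.ProtobufSerDe;
import org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry;
import org.apache.hadoop.io.BytesWritable;

public class ProtoSerDeDriver {
  public static void main(String[] args) throws Exception {
    ProtobufSerDe serde = new ProtobufBytesWritableSerDe();

    // The serde is configured with the fully qualified protobuf message class
    // and, optionally, the message types to surface as Hive maps.
    Properties tbl = new Properties();
    tbl.setProperty(ProtobufSerDe.PROTO_CLASS, MapFieldEntry.class.getName());
    tbl.setProperty(ProtobufSerDe.MAP_TYPES, "MapFieldEntry");
    serde.initialize(new Configuration(false), tbl);

    // Raw protobuf bytes wrapped in a BytesWritable are deserialized into the
    // row object described by serde.getObjectInspector().
    MapFieldEntry proto = MapFieldEntry.newBuilder().setKey("k").setValue("v").build();
    Object row = serde.deserialize(new BytesWritable(proto.toByteArray()));

    System.out.println(serde.getObjectInspector().getTypeName());
    System.out.println(Arrays.toString((Object[]) row));
  }
}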
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestCachedStoreUpdateUsingEvents.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/cache/TestCachedStoreUpdateUsingEvents.java
similarity index 100%
rename from itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestCachedStoreUpdateUsingEvents.java
rename to itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/cache/TestCachedStoreUpdateUsingEvents.java
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/metatool/package-info.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/metatool/package-info.java
index dd2aacd..3b4574f 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/metatool/package-info.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/metatool/package-info.java
@@ -19,5 +19,5 @@
/**
* This package contains the HiveMetaTool program and the classes used by it.
*/
-package org.apache.hadoop.hive.metastore.tool.metatool;
+package org.apache.hadoop.hive.metastore.tools.metatool;