You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by ke...@apache.org on 2013/03/05 21:44:52 UTC
svn commit: r1452992 [2/8] - in /hive/trunk: ./ ivy/ ql/
ql/src/gen/protobuf/ ql/src/gen/protobuf/gen-java/
ql/src/gen/protobuf/gen-java/org/ ql/src/gen/protobuf/gen-java/org/apache/
ql/src/gen/protobuf/gen-java/org/apache/hadoop/ ql/src/gen/protobuf/g...
Added: hive/trunk/ql/src/gen/protobuf/gen-java/org/apache/hadoop/hive/ql/io/orc/OrcProto.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/gen/protobuf/gen-java/org/apache/hadoop/hive/ql/io/orc/OrcProto.java?rev=1452992&view=auto
==============================================================================
--- hive/trunk/ql/src/gen/protobuf/gen-java/org/apache/hadoop/hive/ql/io/orc/OrcProto.java (added)
+++ hive/trunk/ql/src/gen/protobuf/gen-java/org/apache/hadoop/hive/ql/io/orc/OrcProto.java Tue Mar 5 20:44:50 2013
@@ -0,0 +1,9811 @@
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// source: orc_proto.proto
+
+package org.apache.hadoop.hive.ql.io.orc;
+
+public final class OrcProto {
+ // Private constructor: OrcProto is a non-instantiable container class for
+ // the generated ORC protobuf messages.
+ private OrcProto() {}
+ // Generated hook for registering proto extensions; empty because
+ // orc_proto.proto declares no extensions.
+ public static void registerAllExtensions(
+ com.google.protobuf.ExtensionRegistry registry) {
+ }
+ // Generated enum for the ORC `CompressionKind` proto enum (orc_proto.proto):
+ // the compression codec used for an ORC file. Each constant carries its
+ // descriptor index and its wire value. DO NOT hand-edit; regenerate with
+ // protoc from the .proto file.
+ public enum CompressionKind
+ implements com.google.protobuf.ProtocolMessageEnum {
+ NONE(0, 0),
+ ZLIB(1, 1),
+ SNAPPY(2, 2),
+ LZO(3, 3),
+ ;
+
+ // Wire-format numeric values for each constant.
+ public static final int NONE_VALUE = 0;
+ public static final int ZLIB_VALUE = 1;
+ public static final int SNAPPY_VALUE = 2;
+ public static final int LZO_VALUE = 3;
+
+
+ public final int getNumber() { return value; }
+
+ // Maps a wire value back to its constant; returns null for unknown values
+ // (e.g. data written by a newer schema).
+ public static CompressionKind valueOf(int value) {
+ switch (value) {
+ case 0: return NONE;
+ case 1: return ZLIB;
+ case 2: return SNAPPY;
+ case 3: return LZO;
+ default: return null;
+ }
+ }
+
+ public static com.google.protobuf.Internal.EnumLiteMap<CompressionKind>
+ internalGetValueMap() {
+ return internalValueMap;
+ }
+ private static com.google.protobuf.Internal.EnumLiteMap<CompressionKind>
+ internalValueMap =
+ new com.google.protobuf.Internal.EnumLiteMap<CompressionKind>() {
+ public CompressionKind findValueByNumber(int number) {
+ return CompressionKind.valueOf(number);
+ }
+ };
+
+ public final com.google.protobuf.Descriptors.EnumValueDescriptor
+ getValueDescriptor() {
+ return getDescriptor().getValues().get(index);
+ }
+ public final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+ // First enum type declared in the file descriptor (index 0).
+ public static final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptor() {
+ return org.apache.hadoop.hive.ql.io.orc.OrcProto.getDescriptor().getEnumTypes().get(0);
+ }
+
+ private static final CompressionKind[] VALUES = {
+ NONE, ZLIB, SNAPPY, LZO,
+ };
+
+ // Descriptor-based lookup; rejects descriptors belonging to other enum types.
+ public static CompressionKind valueOf(
+ com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
+ if (desc.getType() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "EnumValueDescriptor is not for this type.");
+ }
+ return VALUES[desc.getIndex()];
+ }
+
+ // index: position in the descriptor; value: wire-format number.
+ private final int index;
+ private final int value;
+
+ private CompressionKind(int index, int value) {
+ this.index = index;
+ this.value = value;
+ }
+
+ // @@protoc_insertion_point(enum_scope:org.apache.hadoop.hive.ql.io.orc.CompressionKind)
+ }
+
+ // Generated read-only accessor interface shared by IntegerStatistics and its
+ // Builder. Each optional sint64 field gets a has-presence check plus a getter.
+ public interface IntegerStatisticsOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // optional sint64 minimum = 1;
+ boolean hasMinimum();
+ long getMinimum();
+
+ // optional sint64 maximum = 2;
+ boolean hasMaximum();
+ long getMaximum();
+
+ // optional sint64 sum = 3;
+ boolean hasSum();
+ long getSum();
+ }
+ // Generated message for the ORC `IntegerStatistics` proto (orc_proto.proto):
+ // per-column integer statistics with optional sint64 minimum/maximum/sum.
+ // DO NOT hand-edit; regenerate with protoc from the .proto file.
+ public static final class IntegerStatistics extends
+ com.google.protobuf.GeneratedMessage
+ implements IntegerStatisticsOrBuilder {
+ // Use IntegerStatistics.newBuilder() to construct.
+ private IntegerStatistics(Builder builder) {
+ super(builder);
+ }
+ private IntegerStatistics(boolean noInit) {}
+
+ // Singleton default instance, created in the static initializer below.
+ private static final IntegerStatistics defaultInstance;
+ public static IntegerStatistics getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public IntegerStatistics getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hive.ql.io.orc.OrcProto.internal_static_org_apache_hadoop_hive_ql_io_orc_IntegerStatistics_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hive.ql.io.orc.OrcProto.internal_static_org_apache_hadoop_hive_ql_io_orc_IntegerStatistics_fieldAccessorTable;
+ }
+
+ // Presence bits: 0x1 = minimum, 0x2 = maximum, 0x4 = sum.
+ private int bitField0_;
+ // optional sint64 minimum = 1;
+ public static final int MINIMUM_FIELD_NUMBER = 1;
+ private long minimum_;
+ public boolean hasMinimum() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public long getMinimum() {
+ return minimum_;
+ }
+
+ // optional sint64 maximum = 2;
+ public static final int MAXIMUM_FIELD_NUMBER = 2;
+ private long maximum_;
+ public boolean hasMaximum() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public long getMaximum() {
+ return maximum_;
+ }
+
+ // optional sint64 sum = 3;
+ public static final int SUM_FIELD_NUMBER = 3;
+ private long sum_;
+ public boolean hasSum() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ public long getSum() {
+ return sum_;
+ }
+
+ private void initFields() {
+ minimum_ = 0L;
+ maximum_ = 0L;
+ sum_ = 0L;
+ }
+ // All fields are optional scalars, so the message is always initialized;
+ // the result is memoized (-1 = unknown, 1 = initialized).
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ // Serializes only the fields whose presence bit is set, then any
+ // unknown fields preserved from parsing.
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeSInt64(1, minimum_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeSInt64(2, maximum_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ output.writeSInt64(3, sum_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ // Memoized serialized size (-1 = not yet computed).
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeSInt64Size(1, minimum_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeSInt64Size(2, maximum_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeSInt64Size(3, sum_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ // Static parse entry points; all delegate to a fresh Builder and
+ // buildParsed(), which reports missing required fields as
+ // InvalidProtocolBufferException (vacuous here — no required fields).
+ public static org.apache.hadoop.hive.ql.io.orc.OrcProto.IntegerStatistics parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hive.ql.io.orc.OrcProto.IntegerStatistics parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hive.ql.io.orc.OrcProto.IntegerStatistics parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hive.ql.io.orc.OrcProto.IntegerStatistics parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hive.ql.io.orc.OrcProto.IntegerStatistics parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hive.ql.io.orc.OrcProto.IntegerStatistics parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ // Delimited variants return null when the stream is already at EOF.
+ public static org.apache.hadoop.hive.ql.io.orc.OrcProto.IntegerStatistics parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hive.ql.io.orc.OrcProto.IntegerStatistics parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hive.ql.io.orc.OrcProto.IntegerStatistics parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hive.ql.io.orc.OrcProto.IntegerStatistics parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hive.ql.io.orc.OrcProto.IntegerStatistics prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ // Generated mutable builder; mirrors the message fields with its own
+ // presence bits and copies them into an immutable message in buildPartial().
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hive.ql.io.orc.OrcProto.IntegerStatisticsOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hive.ql.io.orc.OrcProto.internal_static_org_apache_hadoop_hive_ql_io_orc_IntegerStatistics_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hive.ql.io.orc.OrcProto.internal_static_org_apache_hadoop_hive_ql_io_orc_IntegerStatistics_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hive.ql.io.orc.OrcProto.IntegerStatistics.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ // Resets all fields to their proto defaults and clears presence bits.
+ public Builder clear() {
+ super.clear();
+ minimum_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ maximum_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ sum_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000004);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hive.ql.io.orc.OrcProto.IntegerStatistics.getDescriptor();
+ }
+
+ public org.apache.hadoop.hive.ql.io.orc.OrcProto.IntegerStatistics getDefaultInstanceForType() {
+ return org.apache.hadoop.hive.ql.io.orc.OrcProto.IntegerStatistics.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hive.ql.io.orc.OrcProto.IntegerStatistics build() {
+ org.apache.hadoop.hive.ql.io.orc.OrcProto.IntegerStatistics result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ // Like build(), but surfaces uninitialized messages as a checked
+ // InvalidProtocolBufferException for the parseFrom() paths.
+ private org.apache.hadoop.hive.ql.io.orc.OrcProto.IntegerStatistics buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hive.ql.io.orc.OrcProto.IntegerStatistics result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ // Copies field values unconditionally but only transfers presence bits
+ // for fields that were actually set.
+ public org.apache.hadoop.hive.ql.io.orc.OrcProto.IntegerStatistics buildPartial() {
+ org.apache.hadoop.hive.ql.io.orc.OrcProto.IntegerStatistics result = new org.apache.hadoop.hive.ql.io.orc.OrcProto.IntegerStatistics(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.minimum_ = minimum_;
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.maximum_ = maximum_;
+ if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+ to_bitField0_ |= 0x00000004;
+ }
+ result.sum_ = sum_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hive.ql.io.orc.OrcProto.IntegerStatistics) {
+ return mergeFrom((org.apache.hadoop.hive.ql.io.orc.OrcProto.IntegerStatistics)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ // Field-by-field merge: only fields set on `other` overwrite this builder.
+ public Builder mergeFrom(org.apache.hadoop.hive.ql.io.orc.OrcProto.IntegerStatistics other) {
+ if (other == org.apache.hadoop.hive.ql.io.orc.OrcProto.IntegerStatistics.getDefaultInstance()) return this;
+ if (other.hasMinimum()) {
+ setMinimum(other.getMinimum());
+ }
+ if (other.hasMaximum()) {
+ setMaximum(other.getMaximum());
+ }
+ if (other.hasSum()) {
+ setSum(other.getSum());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ // Wire-format parse loop. Tag 0 signals end of input per the
+ // CodedInputStream.readTag contract; unrecognized tags are preserved
+ // in the unknown-field set. Tags 8/16/24 are fields 1/2/3 with
+ // varint wire type (field_number << 3 | 0).
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 8: {
+ bitField0_ |= 0x00000001;
+ minimum_ = input.readSInt64();
+ break;
+ }
+ case 16: {
+ bitField0_ |= 0x00000002;
+ maximum_ = input.readSInt64();
+ break;
+ }
+ case 24: {
+ bitField0_ |= 0x00000004;
+ sum_ = input.readSInt64();
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // optional sint64 minimum = 1;
+ private long minimum_ ;
+ public boolean hasMinimum() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public long getMinimum() {
+ return minimum_;
+ }
+ public Builder setMinimum(long value) {
+ bitField0_ |= 0x00000001;
+ minimum_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearMinimum() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ minimum_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // optional sint64 maximum = 2;
+ private long maximum_ ;
+ public boolean hasMaximum() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public long getMaximum() {
+ return maximum_;
+ }
+ public Builder setMaximum(long value) {
+ bitField0_ |= 0x00000002;
+ maximum_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearMaximum() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ maximum_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // optional sint64 sum = 3;
+ private long sum_ ;
+ public boolean hasSum() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ public long getSum() {
+ return sum_;
+ }
+ public Builder setSum(long value) {
+ bitField0_ |= 0x00000004;
+ sum_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearSum() {
+ bitField0_ = (bitField0_ & ~0x00000004);
+ sum_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.ql.io.orc.IntegerStatistics)
+ }
+
+ static {
+ defaultInstance = new IntegerStatistics(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.ql.io.orc.IntegerStatistics)
+ }
+
+ // Generated read-only accessor interface shared by DoubleStatistics and its
+ // Builder. Each optional double field gets a has-presence check plus a getter.
+ public interface DoubleStatisticsOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // optional double minimum = 1;
+ boolean hasMinimum();
+ double getMinimum();
+
+ // optional double maximum = 2;
+ boolean hasMaximum();
+ double getMaximum();
+
+ // optional double sum = 3;
+ boolean hasSum();
+ double getSum();
+ }
+ // Generated message for the ORC `DoubleStatistics` proto (orc_proto.proto):
+ // per-column floating-point statistics with optional double
+ // minimum/maximum/sum. Structurally identical to IntegerStatistics but with
+ // fixed64 (double) wire encoding. DO NOT hand-edit; regenerate with protoc.
+ public static final class DoubleStatistics extends
+ com.google.protobuf.GeneratedMessage
+ implements DoubleStatisticsOrBuilder {
+ // Use DoubleStatistics.newBuilder() to construct.
+ private DoubleStatistics(Builder builder) {
+ super(builder);
+ }
+ private DoubleStatistics(boolean noInit) {}
+
+ // Singleton default instance, created in the static initializer below.
+ private static final DoubleStatistics defaultInstance;
+ public static DoubleStatistics getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public DoubleStatistics getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hive.ql.io.orc.OrcProto.internal_static_org_apache_hadoop_hive_ql_io_orc_DoubleStatistics_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hive.ql.io.orc.OrcProto.internal_static_org_apache_hadoop_hive_ql_io_orc_DoubleStatistics_fieldAccessorTable;
+ }
+
+ // Presence bits: 0x1 = minimum, 0x2 = maximum, 0x4 = sum.
+ private int bitField0_;
+ // optional double minimum = 1;
+ public static final int MINIMUM_FIELD_NUMBER = 1;
+ private double minimum_;
+ public boolean hasMinimum() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public double getMinimum() {
+ return minimum_;
+ }
+
+ // optional double maximum = 2;
+ public static final int MAXIMUM_FIELD_NUMBER = 2;
+ private double maximum_;
+ public boolean hasMaximum() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public double getMaximum() {
+ return maximum_;
+ }
+
+ // optional double sum = 3;
+ public static final int SUM_FIELD_NUMBER = 3;
+ private double sum_;
+ public boolean hasSum() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ public double getSum() {
+ return sum_;
+ }
+
+ private void initFields() {
+ minimum_ = 0D;
+ maximum_ = 0D;
+ sum_ = 0D;
+ }
+ // All fields are optional scalars, so the message is always initialized;
+ // the result is memoized (-1 = unknown, 1 = initialized).
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ // Serializes only the fields whose presence bit is set, then any
+ // unknown fields preserved from parsing.
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeDouble(1, minimum_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeDouble(2, maximum_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ output.writeDouble(3, sum_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ // Memoized serialized size (-1 = not yet computed).
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeDoubleSize(1, minimum_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeDoubleSize(2, maximum_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeDoubleSize(3, sum_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ // Static parse entry points; all delegate to a fresh Builder and
+ // buildParsed() (vacuous initialization check — no required fields).
+ public static org.apache.hadoop.hive.ql.io.orc.OrcProto.DoubleStatistics parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hive.ql.io.orc.OrcProto.DoubleStatistics parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hive.ql.io.orc.OrcProto.DoubleStatistics parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hive.ql.io.orc.OrcProto.DoubleStatistics parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hive.ql.io.orc.OrcProto.DoubleStatistics parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hive.ql.io.orc.OrcProto.DoubleStatistics parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ // Delimited variants return null when the stream is already at EOF.
+ public static org.apache.hadoop.hive.ql.io.orc.OrcProto.DoubleStatistics parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hive.ql.io.orc.OrcProto.DoubleStatistics parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hive.ql.io.orc.OrcProto.DoubleStatistics parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hive.ql.io.orc.OrcProto.DoubleStatistics parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hive.ql.io.orc.OrcProto.DoubleStatistics prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ // Generated mutable builder; mirrors the message fields with its own
+ // presence bits and copies them into an immutable message in buildPartial().
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hive.ql.io.orc.OrcProto.DoubleStatisticsOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hive.ql.io.orc.OrcProto.internal_static_org_apache_hadoop_hive_ql_io_orc_DoubleStatistics_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hive.ql.io.orc.OrcProto.internal_static_org_apache_hadoop_hive_ql_io_orc_DoubleStatistics_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hive.ql.io.orc.OrcProto.DoubleStatistics.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ // Resets all fields to their proto defaults and clears presence bits.
+ public Builder clear() {
+ super.clear();
+ minimum_ = 0D;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ maximum_ = 0D;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ sum_ = 0D;
+ bitField0_ = (bitField0_ & ~0x00000004);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hive.ql.io.orc.OrcProto.DoubleStatistics.getDescriptor();
+ }
+
+ public org.apache.hadoop.hive.ql.io.orc.OrcProto.DoubleStatistics getDefaultInstanceForType() {
+ return org.apache.hadoop.hive.ql.io.orc.OrcProto.DoubleStatistics.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hive.ql.io.orc.OrcProto.DoubleStatistics build() {
+ org.apache.hadoop.hive.ql.io.orc.OrcProto.DoubleStatistics result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ // Like build(), but surfaces uninitialized messages as a checked
+ // InvalidProtocolBufferException for the parseFrom() paths.
+ private org.apache.hadoop.hive.ql.io.orc.OrcProto.DoubleStatistics buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hive.ql.io.orc.OrcProto.DoubleStatistics result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ // Copies field values unconditionally but only transfers presence bits
+ // for fields that were actually set.
+ public org.apache.hadoop.hive.ql.io.orc.OrcProto.DoubleStatistics buildPartial() {
+ org.apache.hadoop.hive.ql.io.orc.OrcProto.DoubleStatistics result = new org.apache.hadoop.hive.ql.io.orc.OrcProto.DoubleStatistics(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.minimum_ = minimum_;
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.maximum_ = maximum_;
+ if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+ to_bitField0_ |= 0x00000004;
+ }
+ result.sum_ = sum_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hive.ql.io.orc.OrcProto.DoubleStatistics) {
+ return mergeFrom((org.apache.hadoop.hive.ql.io.orc.OrcProto.DoubleStatistics)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ // Field-by-field merge: only fields set on `other` overwrite this builder.
+ public Builder mergeFrom(org.apache.hadoop.hive.ql.io.orc.OrcProto.DoubleStatistics other) {
+ if (other == org.apache.hadoop.hive.ql.io.orc.OrcProto.DoubleStatistics.getDefaultInstance()) return this;
+ if (other.hasMinimum()) {
+ setMinimum(other.getMinimum());
+ }
+ if (other.hasMaximum()) {
+ setMaximum(other.getMaximum());
+ }
+ if (other.hasSum()) {
+ setSum(other.getSum());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ // Wire-format parse loop. Tag 0 signals end of input per the
+ // CodedInputStream.readTag contract; unrecognized tags are preserved
+ // in the unknown-field set. Tags 9/17/25 are fields 1/2/3 with
+ // 64-bit wire type (field_number << 3 | 1).
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 9: {
+ bitField0_ |= 0x00000001;
+ minimum_ = input.readDouble();
+ break;
+ }
+ case 17: {
+ bitField0_ |= 0x00000002;
+ maximum_ = input.readDouble();
+ break;
+ }
+ case 25: {
+ bitField0_ |= 0x00000004;
+ sum_ = input.readDouble();
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // optional double minimum = 1;
+ private double minimum_ ;
+ public boolean hasMinimum() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public double getMinimum() {
+ return minimum_;
+ }
+ public Builder setMinimum(double value) {
+ bitField0_ |= 0x00000001;
+ minimum_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearMinimum() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ minimum_ = 0D;
+ onChanged();
+ return this;
+ }
+
+ // optional double maximum = 2;
+ private double maximum_ ;
+ public boolean hasMaximum() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public double getMaximum() {
+ return maximum_;
+ }
+ public Builder setMaximum(double value) {
+ bitField0_ |= 0x00000002;
+ maximum_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearMaximum() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ maximum_ = 0D;
+ onChanged();
+ return this;
+ }
+
+ // optional double sum = 3;
+ private double sum_ ;
+ public boolean hasSum() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ public double getSum() {
+ return sum_;
+ }
+ public Builder setSum(double value) {
+ bitField0_ |= 0x00000004;
+ sum_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearSum() {
+ bitField0_ = (bitField0_ & ~0x00000004);
+ sum_ = 0D;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.ql.io.orc.DoubleStatistics)
+ }
+
+ static {
+ defaultInstance = new DoubleStatistics(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.ql.io.orc.DoubleStatistics)
+ }
+
+ // Generated read-only accessor interface shared by StringStatistics and its
+ // Builder. Each optional string field gets a has-presence check plus a getter.
+ public interface StringStatisticsOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // optional string minimum = 1;
+ boolean hasMinimum();
+ String getMinimum();
+
+ // optional string maximum = 2;
+ boolean hasMaximum();
+ String getMaximum();
+ }
+  /**
+   * Generated protobuf message carrying the optional {@code minimum} and
+   * {@code maximum} string statistics for an ORC column.  This class is
+   * emitted by the protocol buffer compiler from orc_proto.proto; changes
+   * belong in the .proto source, not here.
+   */
+  public static final class StringStatistics extends
+      com.google.protobuf.GeneratedMessage
+      implements StringStatisticsOrBuilder {
+    // Use StringStatistics.newBuilder() to construct.
+    private StringStatistics(Builder builder) {
+      super(builder);
+    }
+    // Used only to build the shared default instance in the static block below.
+    private StringStatistics(boolean noInit) {}
+
+    private static final StringStatistics defaultInstance;
+    public static StringStatistics getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public StringStatistics getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hive.ql.io.orc.OrcProto.internal_static_org_apache_hadoop_hive_ql_io_orc_StringStatistics_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hive.ql.io.orc.OrcProto.internal_static_org_apache_hadoop_hive_ql_io_orc_StringStatistics_fieldAccessorTable;
+    }
+
+    // Presence bits: bit 0 = minimum, bit 1 = maximum.
+    private int bitField0_;
+    // optional string minimum = 1;
+    public static final int MINIMUM_FIELD_NUMBER = 1;
+    private java.lang.Object minimum_;
+    public boolean hasMinimum() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    // Lazily decodes the stored ByteString; the decoded String is cached back
+    // into minimum_ only when the bytes are valid UTF-8.
+    public String getMinimum() {
+      java.lang.Object ref = minimum_;
+      if (ref instanceof String) {
+        return (String) ref;
+      } else {
+        com.google.protobuf.ByteString bs =
+            (com.google.protobuf.ByteString) ref;
+        String s = bs.toStringUtf8();
+        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
+          minimum_ = s;
+        }
+        return s;
+      }
+    }
+    private com.google.protobuf.ByteString getMinimumBytes() {
+      java.lang.Object ref = minimum_;
+      if (ref instanceof String) {
+        com.google.protobuf.ByteString b =
+            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
+        minimum_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
+
+    // optional string maximum = 2;
+    public static final int MAXIMUM_FIELD_NUMBER = 2;
+    private java.lang.Object maximum_;
+    public boolean hasMaximum() {
+      return ((bitField0_ & 0x00000002) == 0x00000002);
+    }
+    // Same lazy-decode-and-cache scheme as getMinimum().
+    public String getMaximum() {
+      java.lang.Object ref = maximum_;
+      if (ref instanceof String) {
+        return (String) ref;
+      } else {
+        com.google.protobuf.ByteString bs =
+            (com.google.protobuf.ByteString) ref;
+        String s = bs.toStringUtf8();
+        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
+          maximum_ = s;
+        }
+        return s;
+      }
+    }
+    private com.google.protobuf.ByteString getMaximumBytes() {
+      java.lang.Object ref = maximum_;
+      if (ref instanceof String) {
+        com.google.protobuf.ByteString b =
+            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
+        maximum_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
+
+    private void initFields() {
+      minimum_ = "";
+      maximum_ = "";
+    }
+    // -1 = not computed, 0 = false, 1 = true; both fields are optional so the
+    // message is always initialized.
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeBytes(1, getMinimumBytes());
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        output.writeBytes(2, getMaximumBytes());
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(1, getMinimumBytes());
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(2, getMaximumBytes());
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    public static org.apache.hadoop.hive.ql.io.orc.OrcProto.StringStatistics parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.hive.ql.io.orc.OrcProto.StringStatistics parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hive.ql.io.orc.OrcProto.StringStatistics parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.hive.ql.io.orc.OrcProto.StringStatistics parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hive.ql.io.orc.OrcProto.StringStatistics parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hive.ql.io.orc.OrcProto.StringStatistics parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hive.ql.io.orc.OrcProto.StringStatistics parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
+    }
+    public static org.apache.hadoop.hive.ql.io.orc.OrcProto.StringStatistics parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
+    }
+    public static org.apache.hadoop.hive.ql.io.orc.OrcProto.StringStatistics parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hive.ql.io.orc.OrcProto.StringStatistics parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.hive.ql.io.orc.OrcProto.StringStatistics prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /** Mutable builder for {@link StringStatistics}; mirrors its two fields. */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.hive.ql.io.orc.OrcProto.StringStatisticsOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hive.ql.io.orc.OrcProto.internal_static_org_apache_hadoop_hive_ql_io_orc_StringStatistics_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hive.ql.io.orc.OrcProto.internal_static_org_apache_hadoop_hive_ql_io_orc_StringStatistics_fieldAccessorTable;
+      }
+
+      // Construct using org.apache.hadoop.hive.ql.io.orc.OrcProto.StringStatistics.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        minimum_ = "";
+        bitField0_ = (bitField0_ & ~0x00000001);
+        maximum_ = "";
+        bitField0_ = (bitField0_ & ~0x00000002);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.hive.ql.io.orc.OrcProto.StringStatistics.getDescriptor();
+      }
+
+      public org.apache.hadoop.hive.ql.io.orc.OrcProto.StringStatistics getDefaultInstanceForType() {
+        return org.apache.hadoop.hive.ql.io.orc.OrcProto.StringStatistics.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.hive.ql.io.orc.OrcProto.StringStatistics build() {
+        org.apache.hadoop.hive.ql.io.orc.OrcProto.StringStatistics result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      private org.apache.hadoop.hive.ql.io.orc.OrcProto.StringStatistics buildParsed()
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        org.apache.hadoop.hive.ql.io.orc.OrcProto.StringStatistics result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(
+            result).asInvalidProtocolBufferException();
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.hive.ql.io.orc.OrcProto.StringStatistics buildPartial() {
+        org.apache.hadoop.hive.ql.io.orc.OrcProto.StringStatistics result = new org.apache.hadoop.hive.ql.io.orc.OrcProto.StringStatistics(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.minimum_ = minimum_;
+        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+          to_bitField0_ |= 0x00000002;
+        }
+        result.maximum_ = maximum_;
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.hive.ql.io.orc.OrcProto.StringStatistics) {
+          return mergeFrom((org.apache.hadoop.hive.ql.io.orc.OrcProto.StringStatistics)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.hive.ql.io.orc.OrcProto.StringStatistics other) {
+        if (other == org.apache.hadoop.hive.ql.io.orc.OrcProto.StringStatistics.getDefaultInstance()) return this;
+        if (other.hasMinimum()) {
+          setMinimum(other.getMinimum());
+        }
+        if (other.hasMaximum()) {
+          setMaximum(other.getMaximum());
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        return true;
+      }
+
+      // Hand-rolled wire parse loop: tag 0 means end of stream; unknown tags
+      // are preserved in unknownFields; tags 10/18 are the length-delimited
+      // minimum/maximum fields.
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder(
+            this.getUnknownFields());
+        while (true) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              this.setUnknownFields(unknownFields.build());
+              onChanged();
+              return this;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                this.setUnknownFields(unknownFields.build());
+                onChanged();
+                return this;
+              }
+              break;
+            }
+            case 10: {
+              bitField0_ |= 0x00000001;
+              minimum_ = input.readBytes();
+              break;
+            }
+            case 18: {
+              bitField0_ |= 0x00000002;
+              maximum_ = input.readBytes();
+              break;
+            }
+          }
+        }
+      }
+
+      private int bitField0_;
+
+      // optional string minimum = 1;
+      private java.lang.Object minimum_ = "";
+      public boolean hasMinimum() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      public String getMinimum() {
+        java.lang.Object ref = minimum_;
+        if (!(ref instanceof String)) {
+          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
+          minimum_ = s;
+          return s;
+        } else {
+          return (String) ref;
+        }
+      }
+      public Builder setMinimum(String value) {
+        if (value == null) {
+    throw new NullPointerException();
+  }
+  bitField0_ |= 0x00000001;
+        minimum_ = value;
+        onChanged();
+        return this;
+      }
+      public Builder clearMinimum() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        minimum_ = getDefaultInstance().getMinimum();
+        onChanged();
+        return this;
+      }
+      void setMinimum(com.google.protobuf.ByteString value) {
+        bitField0_ |= 0x00000001;
+        minimum_ = value;
+        onChanged();
+      }
+
+      // optional string maximum = 2;
+      private java.lang.Object maximum_ = "";
+      public boolean hasMaximum() {
+        return ((bitField0_ & 0x00000002) == 0x00000002);
+      }
+      public String getMaximum() {
+        java.lang.Object ref = maximum_;
+        if (!(ref instanceof String)) {
+          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
+          maximum_ = s;
+          return s;
+        } else {
+          return (String) ref;
+        }
+      }
+      public Builder setMaximum(String value) {
+        if (value == null) {
+    throw new NullPointerException();
+  }
+  bitField0_ |= 0x00000002;
+        maximum_ = value;
+        onChanged();
+        return this;
+      }
+      public Builder clearMaximum() {
+        bitField0_ = (bitField0_ & ~0x00000002);
+        maximum_ = getDefaultInstance().getMaximum();
+        onChanged();
+        return this;
+      }
+      void setMaximum(com.google.protobuf.ByteString value) {
+        bitField0_ |= 0x00000002;
+        maximum_ = value;
+        onChanged();
+      }
+
+      // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.ql.io.orc.StringStatistics)
+    }
+
+    static {
+      defaultInstance = new StringStatistics(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.ql.io.orc.StringStatistics)
+  }
+
+  /**
+   * Read-side contract shared by {@code BucketStatistics} and its Builder:
+   * accessors for the packed repeated uint64 {@code count} field.
+   */
+  public interface BucketStatisticsOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // repeated uint64 count = 1 [packed = true];
+    java.util.List<java.lang.Long> getCountList();
+    int getCountCount();
+    long getCount(int index);
+  }
+  /**
+   * Generated protobuf message holding the repeated packed uint64
+   * {@code count} statistics for an ORC column.  Emitted by the protocol
+   * buffer compiler from orc_proto.proto; edit the .proto source instead.
+   */
+  public static final class BucketStatistics extends
+      com.google.protobuf.GeneratedMessage
+      implements BucketStatisticsOrBuilder {
+    // Use BucketStatistics.newBuilder() to construct.
+    private BucketStatistics(Builder builder) {
+      super(builder);
+    }
+    // Used only to build the shared default instance in the static block below.
+    private BucketStatistics(boolean noInit) {}
+
+    private static final BucketStatistics defaultInstance;
+    public static BucketStatistics getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public BucketStatistics getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hive.ql.io.orc.OrcProto.internal_static_org_apache_hadoop_hive_ql_io_orc_BucketStatistics_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hive.ql.io.orc.OrcProto.internal_static_org_apache_hadoop_hive_ql_io_orc_BucketStatistics_fieldAccessorTable;
+    }
+
+    // repeated uint64 count = 1 [packed = true];
+    public static final int COUNT_FIELD_NUMBER = 1;
+    private java.util.List<java.lang.Long> count_;
+    public java.util.List<java.lang.Long>
+        getCountList() {
+      return count_;
+    }
+    public int getCountCount() {
+      return count_.size();
+    }
+    public long getCount(int index) {
+      return count_.get(index);
+    }
+    // Byte length of the packed payload, filled in by getSerializedSize() and
+    // read by writeTo() as the length prefix of the packed field.
+    private int countMemoizedSerializedSize = -1;
+
+    private void initFields() {
+      count_ = java.util.Collections.emptyList();;
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    // NOTE: the leading getSerializedSize() call is load-bearing — it
+    // populates countMemoizedSerializedSize used below.
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+        throws java.io.IOException {
+      getSerializedSize();
+      if (getCountList().size() > 0) {
+        output.writeRawVarint32(10);
+        output.writeRawVarint32(countMemoizedSerializedSize);
+      }
+      for (int i = 0; i < count_.size(); i++) {
+        output.writeUInt64NoTag(count_.get(i));
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      {
+        int dataSize = 0;
+        for (int i = 0; i < count_.size(); i++) {
+          dataSize += com.google.protobuf.CodedOutputStream
+            .computeUInt64SizeNoTag(count_.get(i));
+        }
+        size += dataSize;
+        if (!getCountList().isEmpty()) {
+          size += 1;
+          size += com.google.protobuf.CodedOutputStream
+              .computeInt32SizeNoTag(dataSize);
+        }
+        countMemoizedSerializedSize = dataSize;
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    public static org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatistics parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatistics parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatistics parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data).buildParsed();
+    }
+    public static org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatistics parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return newBuilder().mergeFrom(data, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatistics parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatistics parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+    public static org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatistics parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
+    }
+    public static org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatistics parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      Builder builder = newBuilder();
+      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+        return builder.buildParsed();
+      } else {
+        return null;
+      }
+    }
+    public static org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatistics parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input).buildParsed();
+    }
+    public static org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatistics parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return newBuilder().mergeFrom(input, extensionRegistry)
+               .buildParsed();
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatistics prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /** Mutable builder for {@link BucketStatistics}; mirrors its count list. */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatisticsOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hive.ql.io.orc.OrcProto.internal_static_org_apache_hadoop_hive_ql_io_orc_BucketStatistics_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hive.ql.io.orc.OrcProto.internal_static_org_apache_hadoop_hive_ql_io_orc_BucketStatistics_fieldAccessorTable;
+      }
+
+      // Construct using org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatistics.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        count_ = java.util.Collections.emptyList();;
+        bitField0_ = (bitField0_ & ~0x00000001);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatistics.getDescriptor();
+      }
+
+      public org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatistics getDefaultInstanceForType() {
+        return org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatistics.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatistics build() {
+        org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatistics result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      private org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatistics buildParsed()
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatistics result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(
+            result).asInvalidProtocolBufferException();
+        }
+        return result;
+      }
+
+      // The builder's mutable list is frozen (wrapped unmodifiable) and handed
+      // to the message; bit 0 tracks whether count_ is still builder-owned.
+      public org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatistics buildPartial() {
+        org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatistics result = new org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatistics(this);
+        int from_bitField0_ = bitField0_;
+        if (((bitField0_ & 0x00000001) == 0x00000001)) {
+          count_ = java.util.Collections.unmodifiableList(count_);
+          bitField0_ = (bitField0_ & ~0x00000001);
+        }
+        result.count_ = count_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatistics) {
+          return mergeFrom((org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatistics)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatistics other) {
+        if (other == org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatistics.getDefaultInstance()) return this;
+        if (!other.count_.isEmpty()) {
+          if (count_.isEmpty()) {
+            count_ = other.count_;
+            bitField0_ = (bitField0_ & ~0x00000001);
+          } else {
+            ensureCountIsMutable();
+            count_.addAll(other.count_);
+          }
+          onChanged();
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        return true;
+      }
+
+      // Parse loop accepts both encodings of the repeated field: tag 8 is a
+      // single unpacked varint, tag 10 is a length-delimited packed run.
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder(
+            this.getUnknownFields());
+        while (true) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              this.setUnknownFields(unknownFields.build());
+              onChanged();
+              return this;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                this.setUnknownFields(unknownFields.build());
+                onChanged();
+                return this;
+              }
+              break;
+            }
+            case 8: {
+              ensureCountIsMutable();
+              count_.add(input.readUInt64());
+              break;
+            }
+            case 10: {
+              int length = input.readRawVarint32();
+              int limit = input.pushLimit(length);
+              while (input.getBytesUntilLimit() > 0) {
+                addCount(input.readUInt64());
+              }
+              input.popLimit(limit);
+              break;
+            }
+          }
+        }
+      }
+
+      private int bitField0_;
+
+      // repeated uint64 count = 1 [packed = true];
+      private java.util.List<java.lang.Long> count_ = java.util.Collections.emptyList();;
+      // Copy-on-write: replaces a shared/immutable list with a private
+      // ArrayList before the first mutation.
+      private void ensureCountIsMutable() {
+        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
+          count_ = new java.util.ArrayList<java.lang.Long>(count_);
+          bitField0_ |= 0x00000001;
+         }
+      }
+      public java.util.List<java.lang.Long>
+          getCountList() {
+        return java.util.Collections.unmodifiableList(count_);
+      }
+      public int getCountCount() {
+        return count_.size();
+      }
+      public long getCount(int index) {
+        return count_.get(index);
+      }
+      public Builder setCount(
+          int index, long value) {
+        ensureCountIsMutable();
+        count_.set(index, value);
+        onChanged();
+        return this;
+      }
+      public Builder addCount(long value) {
+        ensureCountIsMutable();
+        count_.add(value);
+        onChanged();
+        return this;
+      }
+      public Builder addAllCount(
+          java.lang.Iterable<? extends java.lang.Long> values) {
+        ensureCountIsMutable();
+        super.addAll(values, count_);
+        onChanged();
+        return this;
+      }
+      public Builder clearCount() {
+        count_ = java.util.Collections.emptyList();;
+        bitField0_ = (bitField0_ & ~0x00000001);
+        onChanged();
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.ql.io.orc.BucketStatistics)
+    }
+
+    static {
+      defaultInstance = new BucketStatistics(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.ql.io.orc.BucketStatistics)
+  }
+
+  /**
+   * Read-side contract shared by {@code ColumnStatistics} and its Builder:
+   * the value count plus the optional type-specific statistic sub-messages
+   * (integer, double, string, bucket).
+   */
+  public interface ColumnStatisticsOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // optional uint64 numberOfValues = 1;
+    boolean hasNumberOfValues();
+    long getNumberOfValues();
+
+    // optional .org.apache.hadoop.hive.ql.io.orc.IntegerStatistics intStatistics = 2;
+    boolean hasIntStatistics();
+    org.apache.hadoop.hive.ql.io.orc.OrcProto.IntegerStatistics getIntStatistics();
+    org.apache.hadoop.hive.ql.io.orc.OrcProto.IntegerStatisticsOrBuilder getIntStatisticsOrBuilder();
+
+    // optional .org.apache.hadoop.hive.ql.io.orc.DoubleStatistics doubleStatistics = 3;
+    boolean hasDoubleStatistics();
+    org.apache.hadoop.hive.ql.io.orc.OrcProto.DoubleStatistics getDoubleStatistics();
+    org.apache.hadoop.hive.ql.io.orc.OrcProto.DoubleStatisticsOrBuilder getDoubleStatisticsOrBuilder();
+
+    // optional .org.apache.hadoop.hive.ql.io.orc.StringStatistics stringStatistics = 4;
+    boolean hasStringStatistics();
+    org.apache.hadoop.hive.ql.io.orc.OrcProto.StringStatistics getStringStatistics();
+    org.apache.hadoop.hive.ql.io.orc.OrcProto.StringStatisticsOrBuilder getStringStatisticsOrBuilder();
+
+    // optional .org.apache.hadoop.hive.ql.io.orc.BucketStatistics bucketStatistics = 5;
+    boolean hasBucketStatistics();
+    org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatistics getBucketStatistics();
+    org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatisticsOrBuilder getBucketStatisticsOrBuilder();
+  }
+ public static final class ColumnStatistics extends
+ com.google.protobuf.GeneratedMessage
+ implements ColumnStatisticsOrBuilder {
+ // Use ColumnStatistics.newBuilder() to construct.
+ private ColumnStatistics(Builder builder) {
+ super(builder);
+ }
+ private ColumnStatistics(boolean noInit) {}
+
+ private static final ColumnStatistics defaultInstance;
+ public static ColumnStatistics getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public ColumnStatistics getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hive.ql.io.orc.OrcProto.internal_static_org_apache_hadoop_hive_ql_io_orc_ColumnStatistics_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hive.ql.io.orc.OrcProto.internal_static_org_apache_hadoop_hive_ql_io_orc_ColumnStatistics_fieldAccessorTable;
+ }
+
+ private int bitField0_;
+ // optional uint64 numberOfValues = 1;
+ public static final int NUMBEROFVALUES_FIELD_NUMBER = 1;
+ private long numberOfValues_;
+ public boolean hasNumberOfValues() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public long getNumberOfValues() {
+ return numberOfValues_;
+ }
+
+ // optional .org.apache.hadoop.hive.ql.io.orc.IntegerStatistics intStatistics = 2;
+ public static final int INTSTATISTICS_FIELD_NUMBER = 2;
+ private org.apache.hadoop.hive.ql.io.orc.OrcProto.IntegerStatistics intStatistics_;
+ public boolean hasIntStatistics() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public org.apache.hadoop.hive.ql.io.orc.OrcProto.IntegerStatistics getIntStatistics() {
+ return intStatistics_;
+ }
+ public org.apache.hadoop.hive.ql.io.orc.OrcProto.IntegerStatisticsOrBuilder getIntStatisticsOrBuilder() {
+ return intStatistics_;
+ }
+
+ // optional .org.apache.hadoop.hive.ql.io.orc.DoubleStatistics doubleStatistics = 3;
+ public static final int DOUBLESTATISTICS_FIELD_NUMBER = 3;
+ private org.apache.hadoop.hive.ql.io.orc.OrcProto.DoubleStatistics doubleStatistics_;
+ public boolean hasDoubleStatistics() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ public org.apache.hadoop.hive.ql.io.orc.OrcProto.DoubleStatistics getDoubleStatistics() {
+ return doubleStatistics_;
+ }
+ public org.apache.hadoop.hive.ql.io.orc.OrcProto.DoubleStatisticsOrBuilder getDoubleStatisticsOrBuilder() {
+ return doubleStatistics_;
+ }
+
+ // optional .org.apache.hadoop.hive.ql.io.orc.StringStatistics stringStatistics = 4;
+ public static final int STRINGSTATISTICS_FIELD_NUMBER = 4;
+ private org.apache.hadoop.hive.ql.io.orc.OrcProto.StringStatistics stringStatistics_;
+ public boolean hasStringStatistics() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ public org.apache.hadoop.hive.ql.io.orc.OrcProto.StringStatistics getStringStatistics() {
+ return stringStatistics_;
+ }
+ public org.apache.hadoop.hive.ql.io.orc.OrcProto.StringStatisticsOrBuilder getStringStatisticsOrBuilder() {
+ return stringStatistics_;
+ }
+
+ // optional .org.apache.hadoop.hive.ql.io.orc.BucketStatistics bucketStatistics = 5;
+ public static final int BUCKETSTATISTICS_FIELD_NUMBER = 5;
+ private org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatistics bucketStatistics_;
+ public boolean hasBucketStatistics() {
+ return ((bitField0_ & 0x00000010) == 0x00000010);
+ }
+ public org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatistics getBucketStatistics() {
+ return bucketStatistics_;
+ }
+ public org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatisticsOrBuilder getBucketStatisticsOrBuilder() {
+ return bucketStatistics_;
+ }
+
+ private void initFields() {
+ numberOfValues_ = 0L;
+ intStatistics_ = org.apache.hadoop.hive.ql.io.orc.OrcProto.IntegerStatistics.getDefaultInstance();
+ doubleStatistics_ = org.apache.hadoop.hive.ql.io.orc.OrcProto.DoubleStatistics.getDefaultInstance();
+ stringStatistics_ = org.apache.hadoop.hive.ql.io.orc.OrcProto.StringStatistics.getDefaultInstance();
+ bucketStatistics_ = org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatistics.getDefaultInstance();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ // Serializes this message to the output stream, writing only the fields whose
+ // presence bit in bitField0_ is set, in ascending field-number order, and then
+ // any preserved unknown fields.
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ // Called for its side effect: populates the memoized sizes that the nested
+ // writeMessage() calls rely on when emitting length-delimited submessages.
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeUInt64(1, numberOfValues_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeMessage(2, intStatistics_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ output.writeMessage(3, doubleStatistics_);
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ output.writeMessage(4, stringStatistics_);
+ }
+ if (((bitField0_ & 0x00000010) == 0x00000010)) {
+ output.writeMessage(5, bucketStatistics_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ // Cached wire size of this message: -1 = not yet computed.
+ private int memoizedSerializedSize = -1;
+ // Computes (and memoizes) the serialized byte size: the sum of the encoded
+ // sizes of each present field plus any preserved unknown fields. Mirrors the
+ // field-by-field presence checks in writeTo().
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(1, numberOfValues_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(2, intStatistics_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(3, doubleStatistics_);
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(4, stringStatistics_);
+ }
+ if (((bitField0_ & 0x00000010) == 0x00000010)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(5, bucketStatistics_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ // Java serialization hook; delegates to the GeneratedMessage superclass
+ // implementation rather than serializing this object's fields directly.
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ // Static parse factories. Each overload decodes a ColumnStatistics from the
+ // given source via newBuilder().mergeFrom(...).buildParsed(); parse failures
+ // surface as InvalidProtocolBufferException (wrapped in IOException for the
+ // stream-based overloads where the stream itself can fail).
+ public static org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnStatistics parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnStatistics parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnStatistics parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnStatistics parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnStatistics parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnStatistics parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ // Delimited variants read a varint length prefix before the message bytes;
+ // they return null when mergeDelimitedFrom reports no message (end of stream).
+ public static org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnStatistics parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnStatistics parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnStatistics parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnStatistics parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ // Builder factories: a fresh empty builder, a builder pre-populated from an
+ // existing message (prototype / this), and the framework hook that creates a
+ // builder attached to a parent for change notification.
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnStatistics prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnStatisticsOrBuilder {
+ // Reflection support: the message descriptor and the field accessor table
+ // generated for ColumnStatistics, both held as statics on the outer OrcProto.
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hive.ql.io.orc.OrcProto.internal_static_org_apache_hadoop_hive_ql_io_orc_ColumnStatistics_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hive.ql.io.orc.OrcProto.internal_static_org_apache_hadoop_hive_ql_io_orc_ColumnStatistics_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnStatistics.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ // Builder attached to a parent so field changes propagate upward (used when
+ // this message is edited as a nested field of another builder).
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ // Eagerly creates the nested single-field builders, but only when the
+ // framework flag alwaysUseFieldBuilders is set; otherwise they are created
+ // lazily on first use.
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getIntStatisticsFieldBuilder();
+ getDoubleStatisticsFieldBuilder();
+ getStringStatisticsFieldBuilder();
+ getBucketStatisticsFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ // Resets the builder to an empty state: each field back to its proto default
+ // (via the nested builder when one exists, otherwise by replacing the stored
+ // default instance) and every presence bit in bitField0_ cleared.
+ public Builder clear() {
+ super.clear();
+ numberOfValues_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ if (intStatisticsBuilder_ == null) {
+ intStatistics_ = org.apache.hadoop.hive.ql.io.orc.OrcProto.IntegerStatistics.getDefaultInstance();
+ } else {
+ intStatisticsBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000002);
+ if (doubleStatisticsBuilder_ == null) {
+ doubleStatistics_ = org.apache.hadoop.hive.ql.io.orc.OrcProto.DoubleStatistics.getDefaultInstance();
+ } else {
+ doubleStatisticsBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000004);
+ if (stringStatisticsBuilder_ == null) {
+ stringStatistics_ = org.apache.hadoop.hive.ql.io.orc.OrcProto.StringStatistics.getDefaultInstance();
+ } else {
+ stringStatisticsBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000008);
+ if (bucketStatisticsBuilder_ == null) {
+ bucketStatistics_ = org.apache.hadoop.hive.ql.io.orc.OrcProto.BucketStatistics.getDefaultInstance();
+ } else {
+ bucketStatisticsBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000010);
+ return this;
+ }
+
+ // Deep copy: snapshots the current state via buildPartial() and merges it
+ // into a fresh builder.
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnStatistics.getDescriptor();
+ }
+
+ public org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnStatistics getDefaultInstanceForType() {
+ return org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnStatistics.getDefaultInstance();
+ }
+
+ // Builds the message, throwing UninitializedMessageException (unchecked) if
+ // it is not initialized. Note: isInitialized() is always true here since
+ // ColumnStatistics has no required fields.
+ public org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnStatistics build() {
+ org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnStatistics result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ // Variant used by the parse paths: reports an uninitialized message as a
+ // checked InvalidProtocolBufferException instead of an unchecked exception.
+ private org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnStatistics buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnStatistics result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ // Assembles a ColumnStatistics from the builder's current state without an
+ // initialization check. Each presence bit is copied from the builder's
+ // bitField0_ into the result's, and each submessage is taken either from the
+ // stored field or, when a nested builder exists, from that builder's build().
+ public org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnStatistics buildPartial() {
+ org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnStatistics result = new org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnStatistics(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.numberOfValues_ = numberOfValues_;
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ if (intStatisticsBuilder_ == null) {
+ result.intStatistics_ = intStatistics_;
+ } else {
+ result.intStatistics_ = intStatisticsBuilder_.build();
+ }
+ if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+ to_bitField0_ |= 0x00000004;
+ }
+ if (doubleStatisticsBuilder_ == null) {
+ result.doubleStatistics_ = doubleStatistics_;
+ } else {
+ result.doubleStatistics_ = doubleStatisticsBuilder_.build();
+ }
+ if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
+ to_bitField0_ |= 0x00000008;
+ }
+ if (stringStatisticsBuilder_ == null) {
+ result.stringStatistics_ = stringStatistics_;
+ } else {
+ result.stringStatistics_ = stringStatisticsBuilder_.build();
+ }
+ if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
+ to_bitField0_ |= 0x00000010;
+ }
+ if (bucketStatisticsBuilder_ == null) {
+ result.bucketStatistics_ = bucketStatistics_;
+ } else {
+ result.bucketStatistics_ = bucketStatisticsBuilder_.build();
+ }
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ // Generic merge entry point: dispatches to the typed overload when the other
+ // message is a ColumnStatistics, otherwise falls back to the reflective
+ // superclass merge.
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnStatistics) {
+ return mergeFrom((org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnStatistics)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ // Typed merge: copies each field that is set on `other` into this builder
+ // (submessages are merged field-by-field, not replaced wholesale), then
+ // merges unknown fields. Merging the default instance is a no-op.
+ public Builder mergeFrom(org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnStatistics other) {
+ if (other == org.apache.hadoop.hive.ql.io.orc.OrcProto.ColumnStatistics.getDefaultInstance()) return this;
+ if (other.hasNumberOfValues()) {
+ setNumberOfValues(other.getNumberOfValues());
+ }
+ if (other.hasIntStatistics()) {
+ mergeIntStatistics(other.getIntStatistics());
+ }
+ if (other.hasDoubleStatistics()) {
+ mergeDoubleStatistics(other.getDoubleStatistics());
+ }
+ if (other.hasStringStatistics()) {
+ mergeStringStatistics(other.getStringStatistics());
+ }
+ if (other.hasBucketStatistics()) {
+ mergeBucketStatistics(other.getBucketStatistics());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ // Always true: ColumnStatistics declares no required fields.
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 8: {
+ bitField0_ |= 0x00000001;
+ numberOfValues_ = input.readUInt64();
+ break;
+ }
+ case 18: {
+ org.apache.hadoop.hive.ql.io.orc.OrcProto.IntegerStatistics.Builder subBuilder = org.apache.hadoop.hive.ql.io.orc.OrcProto.IntegerStatistics.newBuilder();
+ if (hasIntStatistics()) {
+ subBuilder.mergeFrom(getIntStatistics());
+ }
+ input.readMessage(subBuilder, extensionRegistry);
+ setIntStatistics(subBuilder.buildPartial());
+ break;
+ }
+ case 26: {
+ org.apache.hadoop.hive.ql.io.orc.OrcProto.DoubleStatistics.Builder subBuilder = org.apache.hadoop.hive.ql.io.orc.OrcProto.DoubleStatistics.newBuilder();
+ if (hasDoubleStatistics()) {
+ subBuilder.mergeFrom(getDoubleStatistics());
+ }
+ input.readMessage(subBuilder, extensionRegistry);
+ setDoubleStatistics(subBuilder.buildPartial());
+ break;
+ }
+ case 34: {
+ org.apache.hadoop.hive.ql.io.orc.OrcProto.StringStatistics.Builder subBuilder = org.apache.hadoop.hive.ql.io.orc.OrcProto.StringStatistics.newBuilder();
[... 7462 lines stripped ...]