You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by te...@apache.org on 2015/10/16 20:22:18 UTC
[3/5] hbase git commit: HBASE-14406 The dataframe datasource filter
is wrong, and will result in data loss or unexpected behavior (Ted Malaska)
http://git-wip-us.apache.org/repos/asf/hbase/blob/dae1775a/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/protobuf/generated/FilterProtos.java
----------------------------------------------------------------------
diff --git a/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/protobuf/generated/FilterProtos.java b/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/protobuf/generated/FilterProtos.java
new file mode 100644
index 0000000..1968d32
--- /dev/null
+++ b/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/protobuf/generated/FilterProtos.java
@@ -0,0 +1,1840 @@
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// source: Filter.proto
+
+package org.apache.hadoop.hbase.spark.protobuf.generated;
+
+public final class FilterProtos {
+ private FilterProtos() {}
+ public static void registerAllExtensions(
+ com.google.protobuf.ExtensionRegistry registry) {
+ }
+ public interface SQLPredicatePushDownCellToColumnMappingOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required bytes column_family = 1;
+ /**
+ * <code>required bytes column_family = 1;</code>
+ */
+ boolean hasColumnFamily();
+ /**
+ * <code>required bytes column_family = 1;</code>
+ */
+ com.google.protobuf.ByteString getColumnFamily();
+
+ // required bytes qualifier = 2;
+ /**
+ * <code>required bytes qualifier = 2;</code>
+ */
+ boolean hasQualifier();
+ /**
+ * <code>required bytes qualifier = 2;</code>
+ */
+ com.google.protobuf.ByteString getQualifier();
+
+ // required string column_name = 3;
+ /**
+ * <code>required string column_name = 3;</code>
+ */
+ boolean hasColumnName();
+ /**
+ * <code>required string column_name = 3;</code>
+ */
+ java.lang.String getColumnName();
+ /**
+ * <code>required string column_name = 3;</code>
+ */
+ com.google.protobuf.ByteString
+ getColumnNameBytes();
+ }
+ /**
+ * Protobuf type {@code hbase.pb.SQLPredicatePushDownCellToColumnMapping}
+ */
+ public static final class SQLPredicatePushDownCellToColumnMapping extends
+ com.google.protobuf.GeneratedMessage
+ implements SQLPredicatePushDownCellToColumnMappingOrBuilder {
+ // Use SQLPredicatePushDownCellToColumnMapping.newBuilder() to construct.
+ private SQLPredicatePushDownCellToColumnMapping(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private SQLPredicatePushDownCellToColumnMapping(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final SQLPredicatePushDownCellToColumnMapping defaultInstance;
+ public static SQLPredicatePushDownCellToColumnMapping getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public SQLPredicatePushDownCellToColumnMapping getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private SQLPredicatePushDownCellToColumnMapping(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ columnFamily_ = input.readBytes();
+ break;
+ }
+ case 18: {
+ bitField0_ |= 0x00000002;
+ qualifier_ = input.readBytes();
+ break;
+ }
+ case 26: {
+ bitField0_ |= 0x00000004;
+ columnName_ = input.readBytes();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.internal_static_hbase_pb_SQLPredicatePushDownCellToColumnMapping_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.internal_static_hbase_pb_SQLPredicatePushDownCellToColumnMapping_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping.class, org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<SQLPredicatePushDownCellToColumnMapping> PARSER =
+ new com.google.protobuf.AbstractParser<SQLPredicatePushDownCellToColumnMapping>() {
+ public SQLPredicatePushDownCellToColumnMapping parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new SQLPredicatePushDownCellToColumnMapping(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<SQLPredicatePushDownCellToColumnMapping> getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // required bytes column_family = 1;
+ public static final int COLUMN_FAMILY_FIELD_NUMBER = 1;
+ private com.google.protobuf.ByteString columnFamily_;
+ /**
+ * <code>required bytes column_family = 1;</code>
+ */
+ public boolean hasColumnFamily() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>required bytes column_family = 1;</code>
+ */
+ public com.google.protobuf.ByteString getColumnFamily() {
+ return columnFamily_;
+ }
+
+ // required bytes qualifier = 2;
+ public static final int QUALIFIER_FIELD_NUMBER = 2;
+ private com.google.protobuf.ByteString qualifier_;
+ /**
+ * <code>required bytes qualifier = 2;</code>
+ */
+ public boolean hasQualifier() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * <code>required bytes qualifier = 2;</code>
+ */
+ public com.google.protobuf.ByteString getQualifier() {
+ return qualifier_;
+ }
+
+ // required string column_name = 3;
+ public static final int COLUMN_NAME_FIELD_NUMBER = 3;
+ private java.lang.Object columnName_;
+ /**
+ * <code>required string column_name = 3;</code>
+ */
+ public boolean hasColumnName() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * <code>required string column_name = 3;</code>
+ */
+ public java.lang.String getColumnName() {
+ java.lang.Object ref = columnName_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ columnName_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ * <code>required string column_name = 3;</code>
+ */
+ public com.google.protobuf.ByteString
+ getColumnNameBytes() {
+ java.lang.Object ref = columnName_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ columnName_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ private void initFields() {
+ columnFamily_ = com.google.protobuf.ByteString.EMPTY;
+ qualifier_ = com.google.protobuf.ByteString.EMPTY;
+ columnName_ = "";
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasColumnFamily()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasQualifier()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasColumnName()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, columnFamily_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeBytes(2, qualifier_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ output.writeBytes(3, getColumnNameBytes());
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, columnFamily_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(2, qualifier_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(3, getColumnNameBytes());
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping other = (org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping) obj;
+
+ boolean result = true;
+ result = result && (hasColumnFamily() == other.hasColumnFamily());
+ if (hasColumnFamily()) {
+ result = result && getColumnFamily()
+ .equals(other.getColumnFamily());
+ }
+ result = result && (hasQualifier() == other.hasQualifier());
+ if (hasQualifier()) {
+ result = result && getQualifier()
+ .equals(other.getQualifier());
+ }
+ result = result && (hasColumnName() == other.hasColumnName());
+ if (hasColumnName()) {
+ result = result && getColumnName()
+ .equals(other.getColumnName());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasColumnFamily()) {
+ hash = (37 * hash) + COLUMN_FAMILY_FIELD_NUMBER;
+ hash = (53 * hash) + getColumnFamily().hashCode();
+ }
+ if (hasQualifier()) {
+ hash = (37 * hash) + QUALIFIER_FIELD_NUMBER;
+ hash = (53 * hash) + getQualifier().hashCode();
+ }
+ if (hasColumnName()) {
+ hash = (37 * hash) + COLUMN_NAME_FIELD_NUMBER;
+ hash = (53 * hash) + getColumnName().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.SQLPredicatePushDownCellToColumnMapping}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMappingOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.internal_static_hbase_pb_SQLPredicatePushDownCellToColumnMapping_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.internal_static_hbase_pb_SQLPredicatePushDownCellToColumnMapping_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping.class, org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ columnFamily_ = com.google.protobuf.ByteString.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ qualifier_ = com.google.protobuf.ByteString.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ columnName_ = "";
+ bitField0_ = (bitField0_ & ~0x00000004);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.internal_static_hbase_pb_SQLPredicatePushDownCellToColumnMapping_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping build() {
+ org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping buildPartial() {
+ org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping result = new org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.columnFamily_ = columnFamily_;
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.qualifier_ = qualifier_;
+ if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+ to_bitField0_ |= 0x00000004;
+ }
+ result.columnName_ = columnName_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping) {
+ return mergeFrom((org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping other) {
+ if (other == org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping.getDefaultInstance()) return this;
+ if (other.hasColumnFamily()) {
+ setColumnFamily(other.getColumnFamily());
+ }
+ if (other.hasQualifier()) {
+ setQualifier(other.getQualifier());
+ }
+ if (other.hasColumnName()) {
+ bitField0_ |= 0x00000004;
+ columnName_ = other.columnName_;
+ onChanged();
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasColumnFamily()) {
+
+ return false;
+ }
+ if (!hasQualifier()) {
+
+ return false;
+ }
+ if (!hasColumnName()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // required bytes column_family = 1;
+ private com.google.protobuf.ByteString columnFamily_ = com.google.protobuf.ByteString.EMPTY;
+ /**
+ * <code>required bytes column_family = 1;</code>
+ */
+ public boolean hasColumnFamily() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>required bytes column_family = 1;</code>
+ */
+ public com.google.protobuf.ByteString getColumnFamily() {
+ return columnFamily_;
+ }
+ /**
+ * <code>required bytes column_family = 1;</code>
+ */
+ public Builder setColumnFamily(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ columnFamily_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>required bytes column_family = 1;</code>
+ */
+ public Builder clearColumnFamily() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ columnFamily_ = getDefaultInstance().getColumnFamily();
+ onChanged();
+ return this;
+ }
+
+ // required bytes qualifier = 2;
+ private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY;
+ /**
+ * <code>required bytes qualifier = 2;</code>
+ */
+ public boolean hasQualifier() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * <code>required bytes qualifier = 2;</code>
+ */
+ public com.google.protobuf.ByteString getQualifier() {
+ return qualifier_;
+ }
+ /**
+ * <code>required bytes qualifier = 2;</code>
+ */
+ public Builder setQualifier(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000002;
+ qualifier_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>required bytes qualifier = 2;</code>
+ */
+ public Builder clearQualifier() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ qualifier_ = getDefaultInstance().getQualifier();
+ onChanged();
+ return this;
+ }
+
+ // required string column_name = 3;
+ private java.lang.Object columnName_ = "";
+ /**
+ * <code>required string column_name = 3;</code>
+ */
+ public boolean hasColumnName() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * <code>required string column_name = 3;</code>
+ */
+ public java.lang.String getColumnName() {
+ java.lang.Object ref = columnName_;
+ if (!(ref instanceof java.lang.String)) {
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ columnName_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * <code>required string column_name = 3;</code>
+ */
+ public com.google.protobuf.ByteString
+ getColumnNameBytes() {
+ java.lang.Object ref = columnName_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ columnName_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * <code>required string column_name = 3;</code>
+ */
+ public Builder setColumnName(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000004;
+ columnName_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>required string column_name = 3;</code>
+ */
+ public Builder clearColumnName() {
+ bitField0_ = (bitField0_ & ~0x00000004);
+ columnName_ = getDefaultInstance().getColumnName();
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>required string column_name = 3;</code>
+ */
+ public Builder setColumnNameBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000004;
+ columnName_ = value;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.SQLPredicatePushDownCellToColumnMapping)
+ }
+
+ static {
+ defaultInstance = new SQLPredicatePushDownCellToColumnMapping(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.SQLPredicatePushDownCellToColumnMapping)
+ }
+
+ public interface SQLPredicatePushDownFilterOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required string dynamic_logic_expression = 1;
+ /**
+ * <code>required string dynamic_logic_expression = 1;</code>
+ */
+ boolean hasDynamicLogicExpression();
+ /**
+ * <code>required string dynamic_logic_expression = 1;</code>
+ */
+ java.lang.String getDynamicLogicExpression();
+ /**
+ * <code>required string dynamic_logic_expression = 1;</code>
+ */
+ com.google.protobuf.ByteString
+ getDynamicLogicExpressionBytes();
+
+ // repeated bytes value_from_query_array = 2;
+ /**
+ * <code>repeated bytes value_from_query_array = 2;</code>
+ */
+ java.util.List<com.google.protobuf.ByteString> getValueFromQueryArrayList();
+ /**
+ * <code>repeated bytes value_from_query_array = 2;</code>
+ */
+ int getValueFromQueryArrayCount();
+ /**
+ * <code>repeated bytes value_from_query_array = 2;</code>
+ */
+ com.google.protobuf.ByteString getValueFromQueryArray(int index);
+
+ // repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;
+ /**
+ * <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
+ */
+ java.util.List<org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping>
+ getCellToColumnMappingList();
+ /**
+ * <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
+ */
+ org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping getCellToColumnMapping(int index);
+ /**
+ * <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
+ */
+ int getCellToColumnMappingCount();
+ /**
+ * <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
+ */
+ java.util.List<? extends org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMappingOrBuilder>
+ getCellToColumnMappingOrBuilderList();
+ /**
+ * <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
+ */
+ org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMappingOrBuilder getCellToColumnMappingOrBuilder(
+ int index);
+ }
+ /**
+ * Protobuf type {@code hbase.pb.SQLPredicatePushDownFilter}
+ */
+ public static final class SQLPredicatePushDownFilter extends
+ com.google.protobuf.GeneratedMessage
+ implements SQLPredicatePushDownFilterOrBuilder {
+ // Use SQLPredicatePushDownFilter.newBuilder() to construct.
+ private SQLPredicatePushDownFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private SQLPredicatePushDownFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final SQLPredicatePushDownFilter defaultInstance;
+ public static SQLPredicatePushDownFilter getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public SQLPredicatePushDownFilter getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private SQLPredicatePushDownFilter(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ dynamicLogicExpression_ = input.readBytes();
+ break;
+ }
+ case 18: {
+ if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
+ valueFromQueryArray_ = new java.util.ArrayList<com.google.protobuf.ByteString>();
+ mutable_bitField0_ |= 0x00000002;
+ }
+ valueFromQueryArray_.add(input.readBytes());
+ break;
+ }
+ case 26: {
+ if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
+ cellToColumnMapping_ = new java.util.ArrayList<org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping>();
+ mutable_bitField0_ |= 0x00000004;
+ }
+ cellToColumnMapping_.add(input.readMessage(org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping.PARSER, extensionRegistry));
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
+ valueFromQueryArray_ = java.util.Collections.unmodifiableList(valueFromQueryArray_);
+ }
+ if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
+ cellToColumnMapping_ = java.util.Collections.unmodifiableList(cellToColumnMapping_);
+ }
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.internal_static_hbase_pb_SQLPredicatePushDownFilter_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.internal_static_hbase_pb_SQLPredicatePushDownFilter_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter.class, org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<SQLPredicatePushDownFilter> PARSER =
+ new com.google.protobuf.AbstractParser<SQLPredicatePushDownFilter>() {
+ public SQLPredicatePushDownFilter parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new SQLPredicatePushDownFilter(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<SQLPredicatePushDownFilter> getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // required string dynamic_logic_expression = 1;
+ public static final int DYNAMIC_LOGIC_EXPRESSION_FIELD_NUMBER = 1;
+ private java.lang.Object dynamicLogicExpression_;
+ /**
+ * <code>required string dynamic_logic_expression = 1;</code>
+ */
+ public boolean hasDynamicLogicExpression() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>required string dynamic_logic_expression = 1;</code>
+ */
+ public java.lang.String getDynamicLogicExpression() {
+ java.lang.Object ref = dynamicLogicExpression_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ dynamicLogicExpression_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ * <code>required string dynamic_logic_expression = 1;</code>
+ */
+ public com.google.protobuf.ByteString
+ getDynamicLogicExpressionBytes() {
+ java.lang.Object ref = dynamicLogicExpression_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ dynamicLogicExpression_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ // repeated bytes value_from_query_array = 2;
+ public static final int VALUE_FROM_QUERY_ARRAY_FIELD_NUMBER = 2;
+ private java.util.List<com.google.protobuf.ByteString> valueFromQueryArray_;
+ /**
+ * <code>repeated bytes value_from_query_array = 2;</code>
+ */
+ public java.util.List<com.google.protobuf.ByteString>
+ getValueFromQueryArrayList() {
+ return valueFromQueryArray_;
+ }
+ /**
+ * <code>repeated bytes value_from_query_array = 2;</code>
+ */
+ public int getValueFromQueryArrayCount() {
+ return valueFromQueryArray_.size();
+ }
+ /**
+ * <code>repeated bytes value_from_query_array = 2;</code>
+ */
+ public com.google.protobuf.ByteString getValueFromQueryArray(int index) {
+ return valueFromQueryArray_.get(index);
+ }
+
+ // repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;
+ public static final int CELL_TO_COLUMN_MAPPING_FIELD_NUMBER = 3;
+ private java.util.List<org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping> cellToColumnMapping_;
+ /**
+ * <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
+ */
+ public java.util.List<org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping> getCellToColumnMappingList() {
+ return cellToColumnMapping_;
+ }
+ /**
+ * <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
+ */
+ public java.util.List<? extends org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMappingOrBuilder>
+ getCellToColumnMappingOrBuilderList() {
+ return cellToColumnMapping_;
+ }
+ /**
+ * <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
+ */
+ public int getCellToColumnMappingCount() {
+ return cellToColumnMapping_.size();
+ }
+ /**
+ * <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
+ */
+ public org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping getCellToColumnMapping(int index) {
+ return cellToColumnMapping_.get(index);
+ }
+ /**
+ * <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
+ */
+ public org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMappingOrBuilder getCellToColumnMappingOrBuilder(
+ int index) {
+ return cellToColumnMapping_.get(index);
+ }
+
+ private void initFields() {
+ dynamicLogicExpression_ = "";
+ valueFromQueryArray_ = java.util.Collections.emptyList();
+ cellToColumnMapping_ = java.util.Collections.emptyList();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasDynamicLogicExpression()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ for (int i = 0; i < getCellToColumnMappingCount(); i++) {
+ if (!getCellToColumnMapping(i).isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, getDynamicLogicExpressionBytes());
+ }
+ for (int i = 0; i < valueFromQueryArray_.size(); i++) {
+ output.writeBytes(2, valueFromQueryArray_.get(i));
+ }
+ for (int i = 0; i < cellToColumnMapping_.size(); i++) {
+ output.writeMessage(3, cellToColumnMapping_.get(i));
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, getDynamicLogicExpressionBytes());
+ }
+ {
+ int dataSize = 0;
+ for (int i = 0; i < valueFromQueryArray_.size(); i++) {
+ dataSize += com.google.protobuf.CodedOutputStream
+ .computeBytesSizeNoTag(valueFromQueryArray_.get(i));
+ }
+ size += dataSize;
+ size += 1 * getValueFromQueryArrayList().size();
+ }
+ for (int i = 0; i < cellToColumnMapping_.size(); i++) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(3, cellToColumnMapping_.get(i));
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter other = (org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter) obj;
+
+ boolean result = true;
+ result = result && (hasDynamicLogicExpression() == other.hasDynamicLogicExpression());
+ if (hasDynamicLogicExpression()) {
+ result = result && getDynamicLogicExpression()
+ .equals(other.getDynamicLogicExpression());
+ }
+ result = result && getValueFromQueryArrayList()
+ .equals(other.getValueFromQueryArrayList());
+ result = result && getCellToColumnMappingList()
+ .equals(other.getCellToColumnMappingList());
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasDynamicLogicExpression()) {
+ hash = (37 * hash) + DYNAMIC_LOGIC_EXPRESSION_FIELD_NUMBER;
+ hash = (53 * hash) + getDynamicLogicExpression().hashCode();
+ }
+ if (getValueFromQueryArrayCount() > 0) {
+ hash = (37 * hash) + VALUE_FROM_QUERY_ARRAY_FIELD_NUMBER;
+ hash = (53 * hash) + getValueFromQueryArrayList().hashCode();
+ }
+ if (getCellToColumnMappingCount() > 0) {
+ hash = (37 * hash) + CELL_TO_COLUMN_MAPPING_FIELD_NUMBER;
+ hash = (53 * hash) + getCellToColumnMappingList().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.SQLPredicatePushDownFilter}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilterOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.internal_static_hbase_pb_SQLPredicatePushDownFilter_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.internal_static_hbase_pb_SQLPredicatePushDownFilter_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter.class, org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getCellToColumnMappingFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ dynamicLogicExpression_ = "";
+ bitField0_ = (bitField0_ & ~0x00000001);
+ valueFromQueryArray_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000002);
+ if (cellToColumnMappingBuilder_ == null) {
+ cellToColumnMapping_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000004);
+ } else {
+ cellToColumnMappingBuilder_.clear();
+ }
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.internal_static_hbase_pb_SQLPredicatePushDownFilter_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter build() {
+ org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter buildPartial() {
+ org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter result = new org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.dynamicLogicExpression_ = dynamicLogicExpression_;
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ valueFromQueryArray_ = java.util.Collections.unmodifiableList(valueFromQueryArray_);
+ bitField0_ = (bitField0_ & ~0x00000002);
+ }
+ result.valueFromQueryArray_ = valueFromQueryArray_;
+ if (cellToColumnMappingBuilder_ == null) {
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ cellToColumnMapping_ = java.util.Collections.unmodifiableList(cellToColumnMapping_);
+ bitField0_ = (bitField0_ & ~0x00000004);
+ }
+ result.cellToColumnMapping_ = cellToColumnMapping_;
+ } else {
+ result.cellToColumnMapping_ = cellToColumnMappingBuilder_.build();
+ }
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter) {
+ return mergeFrom((org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter other) {
+ if (other == org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter.getDefaultInstance()) return this;
+ if (other.hasDynamicLogicExpression()) {
+ bitField0_ |= 0x00000001;
+ dynamicLogicExpression_ = other.dynamicLogicExpression_;
+ onChanged();
+ }
+ if (!other.valueFromQueryArray_.isEmpty()) {
+ if (valueFromQueryArray_.isEmpty()) {
+ valueFromQueryArray_ = other.valueFromQueryArray_;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ } else {
+ ensureValueFromQueryArrayIsMutable();
+ valueFromQueryArray_.addAll(other.valueFromQueryArray_);
+ }
+ onChanged();
+ }
+ if (cellToColumnMappingBuilder_ == null) {
+ if (!other.cellToColumnMapping_.isEmpty()) {
+ if (cellToColumnMapping_.isEmpty()) {
+ cellToColumnMapping_ = other.cellToColumnMapping_;
+ bitField0_ = (bitField0_ & ~0x00000004);
+ } else {
+ ensureCellToColumnMappingIsMutable();
+ cellToColumnMapping_.addAll(other.cellToColumnMapping_);
+ }
+ onChanged();
+ }
+ } else {
+ if (!other.cellToColumnMapping_.isEmpty()) {
+ if (cellToColumnMappingBuilder_.isEmpty()) {
+ cellToColumnMappingBuilder_.dispose();
+ cellToColumnMappingBuilder_ = null;
+ cellToColumnMapping_ = other.cellToColumnMapping_;
+ bitField0_ = (bitField0_ & ~0x00000004);
+ cellToColumnMappingBuilder_ =
+ com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
+ getCellToColumnMappingFieldBuilder() : null;
+ } else {
+ cellToColumnMappingBuilder_.addAllMessages(other.cellToColumnMapping_);
+ }
+ }
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasDynamicLogicExpression()) {
+
+ return false;
+ }
+ for (int i = 0; i < getCellToColumnMappingCount(); i++) {
+ if (!getCellToColumnMapping(i).isInitialized()) {
+
+ return false;
+ }
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // required string dynamic_logic_expression = 1;
+ private java.lang.Object dynamicLogicExpression_ = "";
+ /**
+ * <code>required string dynamic_logic_expression = 1;</code>
+ */
+ public boolean hasDynamicLogicExpression() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>required string dynamic_logic_expression = 1;</code>
+ */
+ public java.lang.String getDynamicLogicExpression() {
+ java.lang.Object ref = dynamicLogicExpression_;
+ if (!(ref instanceof java.lang.String)) {
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ dynamicLogicExpression_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * <code>required string dynamic_logic_expression = 1;</code>
+ */
+ public com.google.protobuf.ByteString
+ getDynamicLogicExpressionBytes() {
+ java.lang.Object ref = dynamicLogicExpression_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ dynamicLogicExpression_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * <code>required string dynamic_logic_expression = 1;</code>
+ */
+ public Builder setDynamicLogicExpression(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ dynamicLogicExpression_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>required string dynamic_logic_expression = 1;</code>
+ */
+ public Builder clearDynamicLogicExpression() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ dynamicLogicExpression_ = getDefaultInstance().getDynamicLogicExpression();
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>required string dynamic_logic_expression = 1;</code>
+ */
+ public Builder setDynamicLogicExpressionBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ dynamicLogicExpression_ = value;
+ onChanged();
+ return this;
+ }
+
+ // repeated bytes value_from_query_array = 2;
+ private java.util.List<com.google.protobuf.ByteString> valueFromQueryArray_ = java.util.Collections.emptyList();
+ private void ensureValueFromQueryArrayIsMutable() {
+ if (!((bitField0_ & 0x00000002) == 0x00000002)) {
+ valueFromQueryArray_ = new java.util.ArrayList<com.google.protobuf.ByteString>(valueFromQueryArray_);
+ bitField0_ |= 0x00000002;
+ }
+ }
+ /**
+ * <code>repeated bytes value_from_query_array = 2;</code>
+ */
+ public java.util.List<com.google.protobuf.ByteString>
+ getValueFromQueryArrayList() {
+ return java.util.Collections.unmodifiableList(valueFromQueryArray_);
+ }
+ /**
+ * <code>repeated bytes value_from_query_array = 2;</code>
+ */
+ public int getValueFromQueryArrayCount() {
+ return valueFromQueryArray_.size();
+ }
+ /**
+ * <code>repeated bytes value_from_query_array = 2;</code>
+ */
+ public com.google.protobuf.ByteString getValueFromQueryArray(int index) {
+ return valueFromQueryArray_.get(index);
+ }
+ /**
+ * <code>repeated bytes value_from_query_array = 2;</code>
+ */
+ public Builder setValueFromQueryArray(
+ int index, com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureValueFromQueryArrayIsMutable();
+ valueFromQueryArray_.set(index, value);
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>repeated bytes value_from_query_array = 2;</code>
+ */
+ public Builder addValueFromQueryArray(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureValueFromQueryArrayIsMutable();
+ valueFromQueryArray_.add(value);
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>repeated bytes value_from_query_array = 2;</code>
+ */
+ public Builder addAllValueFromQueryArray(
+ java.lang.Iterable<? extends com.google.protobuf.ByteString> values) {
+ ensureValueFromQueryArrayIsMutable();
+ super.addAll(values, valueFromQueryArray_);
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>repeated bytes value_from_query_array = 2;</code>
+ */
+ public Builder clearValueFromQueryArray() {
+ valueFromQueryArray_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000002);
+ onChanged();
+ return this;
+ }
+
+ // repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;
+ private java.util.List<org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping> cellToColumnMapping_ =
+ java.util.Collections.emptyList();
+ private void ensureCellToColumnMappingIsMutable() {
+ if (!((bitField0_ & 0x00000004) == 0x00000004)) {
+ cellToColumnMapping_ = new java.util.ArrayList<org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping>(cellToColumnMapping_);
+ bitField0_ |= 0x00000004;
+ }
+ }
+
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping, org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping.Builder, org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMappingOrBuilder> cellToColumnMappingBuilder_;
+
+ /**
+ * <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
+ */
+ public java.util.List<org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping> getCellToColumnMappingList() {
+ if (cellToColumnMappingBuilder_ == null) {
+ return java.util.Collections.unmodifiableList(cellToColumnMapping_);
+ } else {
+ return cellToColumnMappingBuilder_.getMessageList();
+ }
+ }
+ /**
+ * <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
+ */
+ public int getCellToColumnMappingCount() {
+ if (cellToColumnMappingBuilder_ == null) {
+ return cellToColumnMapping_.size();
+ } else {
+ return cellToColumnMappingBuilder_.getCount();
+ }
+ }
+ /**
+ * <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
+ */
+ public org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping getCellToColumnMapping(int index) {
+ if (cellToColumnMappingBuilder_ == null) {
+ return cellToColumnMapping_.get(index);
+ } else {
+ return cellToColumnMappingBuilder_.getMessage(index);
+ }
+ }
+ /**
+ * <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
+ */
+ public Builder setCellToColumnMapping(
+ int index, org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping value) {
+ if (cellToColumnMappingBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureCellToColumnMappingIsMutable();
+ cellToColumnMapping_.set(index, value);
+ onChanged();
+ } else {
+ cellToColumnMappingBuilder_.setMessage(index, value);
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
+ */
+ public Builder setCellToColumnMapping(
+ int index, org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping.Builder builderForValue) {
+ if (cellToColumnMappingBuilder_ == null) {
+ ensureCellToColumnMappingIsMutable();
+ cellToColumnMapping_.set(index, builderForValue.build());
+ onChanged();
+ } else {
+ cellToColumnMappingBuilder_.setMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
+ */
+ public Builder addCellToColumnMapping(org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping value) {
+ if (cellToColumnMappingBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureCellToColumnMappingIsMutable();
+ cellToColumnMapping_.add(value);
+ onChanged();
+ } else {
+ cellToColumnMappingBuilder_.addMessage(value);
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
+ */
+ public Builder addCellToColumnMapping(
+ int index, org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping value) {
+ if (cellToColumnMappingBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureCellToColumnMappingIsMutable();
+ cellToColumnMapping_.add(index, value);
+ onChanged();
+ } else {
+ cellToColumnMappingBuilder_.addMessage(index, value);
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
+ */
+ public Builder addCellToColumnMapping(
+ org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping.Builder builderForValue) {
+ if (cellToColumnMappingBuilder_ == null) {
+ ensureCellToColumnMappingIsMutable();
+ cellToColumnMapping_.add(builderForValue.build());
+ onChanged();
+ } else {
+ cellToColumnMappingBuilder_.addMessage(builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
+ */
+ public Builder addCellToColumnMapping(
+ int index, org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping.Builder builderForValue) {
+ if (cellToColumnMappingBuilder_ == null) {
+ ensureCellToColumnMappingIsMutable();
+ cellToColumnMapping_.add(index, builderForValue.build());
+ onChanged();
+ } else {
+ cellToColumnMappingBuilder_.addMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
+ */
+ public Builder addAllCellToColumnMapping(
+ java.lang.Iterable<? extends org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping> values) {
+ if (cellToColumnMappingBuilder_ == null) {
+ ensureCellToColumnMappingIsMutable();
+ super.addAll(values, cellToColumnMapping_);
+ onChanged();
+ } else {
+ cellToColumnMappingBuilder_.addAllMessages(values);
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
+ */
+ public Builder clearCellToColumnMapping() {
+ if (cellToColumnMappingBuilder_ == null) {
+ cellToColumnMapping_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000004);
+ onChanged();
+ } else {
+ cellToColumnMappingBuilder_.clear();
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
+ */
+ public Builder removeCellToColumnMapping(int index) {
+ if (cellToColumnMappingBuilder_ == null) {
+ ensureCellToColumnMappingIsMutable();
+ cellToColumnMapping_.remove(index);
+ onChanged();
+ } else {
+ cellToColumnMappingBuilder_.remove(index);
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
+ */
+ public org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping.Builder getCellToColumnMappingBuilder(
+ int index) {
+ return getCellToColumnMappingFieldBuilder().getBuilder(index);
+ }
+ /**
+ * <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
+ */
+ public org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMappingOrBuilder getCellToColumnMappingOrBuilder(
+ int index) {
+ if (cellToColumnMappingBuilder_ == null) {
+ return cellToColumnMapping_.get(index); } else {
+ return cellToColumnMappingBuilder_.getMessageOrBuilder(index);
+ }
+ }
+ /**
+ * <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
+ */
+ public java.util.List<? extends org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMappingOrBuilder>
+ getCellToColumnMappingOrBuilderList() {
+ if (cellToColumnMappingBuilder_ != null) {
+ return cellToColumnMappingBuilder_.getMessageOrBuilderList();
+ } else {
+ return java.util.Collections.unmodifiableList(cellToColumnMapping_);
+ }
+ }
+ /**
+ * <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
+ */
+ public org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping.Builder addCellToColumnMappingBuilder() {
+ return getCellToColumnMappingFieldBuilder().addBuilder(
+ org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping.getDefaultInstance());
+ }
+ /**
+ * <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
+ */
+ public org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping.Builder addCellToColumnMappingBuilder(
+ int index) {
+ return getCellToColumnMappingFieldBuilder().addBuilder(
+ index, org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping.getDefaultInstance());
+ }
+ /**
+ * <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
+ */
+ public java.util.List<org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping.Builder>
+ getCellToColumnMappingBuilderList() {
+ return getCellToColumnMappingFieldBuilder().getBuilderList();
+ }
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping, org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping.Builder, org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMappingOrBuilder>
+ // Lazily creates the RepeatedFieldBuilder for cell_to_column_mapping. Once
+ // created it takes ownership of the elements: the plain list reference is
+ // nulled out and all further access goes through the builder.
+ getCellToColumnMappingFieldBuilder() {
+ if (cellToColumnMappingBuilder_ == null) {
+ cellToColumnMappingBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping, org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping.Builder, org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMappingOrBuilder>(
+ cellToColumnMapping_,
+ // bit 0x00000004 tracks whether the list is already a private mutable copy
+ ((bitField0_ & 0x00000004) == 0x00000004),
+ getParentForChildren(),
+ isClean());
+ cellToColumnMapping_ = null;
+ }
+ return cellToColumnMappingBuilder_;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.SQLPredicatePushDownFilter)
+ }
+
+ // Eagerly build the shared singleton default (empty) instance of
+ // SQLPredicatePushDownFilter. (protoc-generated; do not edit.)
+ static {
+ defaultInstance = new SQLPredicatePushDownFilter(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.SQLPredicatePushDownFilter)
+ }
+
+ // Cached per-message descriptors and reflective field-accessor tables for the
+ // two message types in Filter.proto; populated by the static initializer below.
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hbase_pb_SQLPredicatePushDownCellToColumnMapping_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hbase_pb_SQLPredicatePushDownCellToColumnMapping_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hbase_pb_SQLPredicatePushDownFilter_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hbase_pb_SQLPredicatePushDownFilter_fieldAccessorTable;
+
+ /** Returns the file-level descriptor for Filter.proto. */
+ public static com.google.protobuf.Descriptors.FileDescriptor
+ getDescriptor() {
+ return descriptor;
+ }
+ private static com.google.protobuf.Descriptors.FileDescriptor
+ descriptor;
+ static {
+ // Serialized FileDescriptorProto of Filter.proto, chunked into string
+ // literals by protoc. Must not be edited by hand.
+ java.lang.String[] descriptorData = {
+ "\n\014Filter.proto\022\010hbase.pb\"h\n\'SQLPredicate" +
+ "PushDownCellToColumnMapping\022\025\n\rcolumn_fa" +
+ "mily\030\001 \002(\014\022\021\n\tqualifier\030\002 \002(\014\022\023\n\013column_" +
+ "name\030\003 \002(\t\"\261\001\n\032SQLPredicatePushDownFilte" +
+ "r\022 \n\030dynamic_logic_expression\030\001 \002(\t\022\036\n\026v" +
+ "alue_from_query_array\030\002 \003(\014\022Q\n\026cell_to_c" +
+ "olumn_mapping\030\003 \003(\01321.hbase.pb.SQLPredic" +
+ "atePushDownCellToColumnMappingBH\n0org.ap" +
+ "ache.hadoop.hbase.spark.protobuf.generat" +
+ "edB\014FilterProtosH\001\210\001\001\240\001\001"
+ };
+ // After the file descriptor is parsed, cache each message's descriptor and
+ // build the reflective accessor tables used by the generated message classes.
+ com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+ new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+ public com.google.protobuf.ExtensionRegistry assignDescriptors(
+ com.google.protobuf.Descriptors.FileDescriptor root) {
+ descriptor = root;
+ internal_static_hbase_pb_SQLPredicatePushDownCellToColumnMapping_descriptor =
+ getDescriptor().getMessageTypes().get(0);
+ internal_static_hbase_pb_SQLPredicatePushDownCellToColumnMapping_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hbase_pb_SQLPredicatePushDownCellToColumnMapping_descriptor,
+ new java.lang.String[] { "ColumnFamily", "Qualifier", "ColumnName", });
+ internal_static_hbase_pb_SQLPredicatePushDownFilter_descriptor =
+ getDescriptor().getMessageTypes().get(1);
+ internal_static_hbase_pb_SQLPredicatePushDownFilter_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hbase_pb_SQLPredicatePushDownFilter_descriptor,
+ new java.lang.String[] { "DynamicLogicExpression", "ValueFromQueryArray", "CellToColumnMapping", });
+ return null;
+ }
+ };
+ // Filter.proto has no imports, hence the empty dependency array.
+ com.google.protobuf.Descriptors.FileDescriptor
+ .internalBuildGeneratedFileFrom(descriptorData,
+ new com.google.protobuf.Descriptors.FileDescriptor[] {
+ }, assigner);
+ }
+
+ // @@protoc_insertion_point(outer_class_scope)
+}
http://git-wip-us.apache.org/repos/asf/hbase/blob/dae1775a/hbase-spark/src/main/protobuf/Filter.proto
----------------------------------------------------------------------
diff --git a/hbase-spark/src/main/protobuf/Filter.proto b/hbase-spark/src/main/protobuf/Filter.proto
new file mode 100644
index 0000000..e076ce8
--- /dev/null
+++ b/hbase-spark/src/main/protobuf/Filter.proto
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// This file contains protocol buffers that are used for filters
+package hbase.pb;
+
+// Generated Java classes live beside the hand-written Spark connector code.
+option java_package = "org.apache.hadoop.hbase.spark.protobuf.generated";
+option java_outer_classname = "FilterProtos";
+// NOTE(review): no service is declared in this file, so java_generic_services
+// appears unnecessary — confirm before removing.
+option java_generic_services = true;
+// NOTE(review): java_generate_equals_and_hash was later deprecated/removed in
+// newer protoc releases — revisit if the toolchain is upgraded.
+option java_generate_equals_and_hash = true;
+option optimize_for = SPEED;
+
+// One mapping from an HBase cell coordinate (column family + qualifier) to the
+// column name it corresponds to — presumably the Spark SQL column; verify
+// against the connector code that builds this message.
+message SQLPredicatePushDownCellToColumnMapping {
+ required bytes column_family = 1;
+ required bytes qualifier = 2;
+ required string column_name = 3;
+}
+
+// Filter pushed down from the Spark SQL datasource: a textual logic expression,
+// the literal byte values it references, and the cell-to-column mappings needed
+// to evaluate it server-side. (Field semantics inferred from names — confirm
+// against the scala-side serializer.)
+message SQLPredicatePushDownFilter {
+ required string dynamic_logic_expression = 1;
+ repeated bytes value_from_query_array = 2;
+ repeated SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/hbase/blob/dae1775a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/ByteArrayComparable.scala
----------------------------------------------------------------------
diff --git a/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/ByteArrayComparable.scala b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/ByteArrayComparable.scala
new file mode 100644
index 0000000..fce92fb
--- /dev/null
+++ b/hbase-spark/src/main/scala/org/apache/hadoop/hbase/spark/ByteArrayComparable.scala
@@ -0,0 +1,47 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.spark
+
+import org.apache.hadoop.hbase.util.Bytes
+
+class ByteArrayComparable(val bytes:Array[Byte], val offset:Int = 0, var length:Int = -1)
+  extends Comparable[ByteArrayComparable] {
+
+  // A length of -1 is a sentinel meaning "span the whole backing array";
+  // resolve it once at construction time.
+  if (length == -1) length = bytes.length
+
+  /** Lexicographic comparison of the two wrapped byte ranges. */
+  override def compareTo(o: ByteArrayComparable): Int =
+    Bytes.compareTo(bytes, offset, length, o.bytes, o.offset, o.length)
+
+  /** Hash of only the wrapped range, kept consistent with equals. */
+  override def hashCode(): Int = Bytes.hashCode(bytes, offset, length)
+
+  /** Equal iff the other object is a ByteArrayComparable whose wrapped range
+   *  holds identical bytes. */
+  override def equals (obj: Any): Boolean = obj match {
+    case other: ByteArrayComparable =>
+      Bytes.equals(bytes, offset, length, other.bytes, other.offset, other.length)
+    case _ => false
+  }
+}