You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@drill.apache.org by ve...@apache.org on 2016/08/07 19:51:10 UTC
[1/5] drill git commit: DRILL-4728: Add support for new metadata
fetch APIs
Repository: drill
Updated Branches:
refs/heads/master 4bd67a660 -> ef6e522c9
http://git-wip-us.apache.org/repos/asf/drill/blob/ef6e522c/protocol/src/main/java/org/apache/drill/exec/proto/beans/TableMetadata.java
----------------------------------------------------------------------
diff --git a/protocol/src/main/java/org/apache/drill/exec/proto/beans/TableMetadata.java b/protocol/src/main/java/org/apache/drill/exec/proto/beans/TableMetadata.java
new file mode 100644
index 0000000..89dfb91
--- /dev/null
+++ b/protocol/src/main/java/org/apache/drill/exec/proto/beans/TableMetadata.java
@@ -0,0 +1,229 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Generated by http://code.google.com/p/protostuff/ ... DO NOT EDIT!
+// Generated from protobuf
+
+package org.apache.drill.exec.proto.beans;
+
+import java.io.Externalizable;
+import java.io.IOException;
+import java.io.ObjectInput;
+import java.io.ObjectOutput;
+
+import com.dyuproject.protostuff.GraphIOUtil;
+import com.dyuproject.protostuff.Input;
+import com.dyuproject.protostuff.Message;
+import com.dyuproject.protostuff.Output;
+import com.dyuproject.protostuff.Schema;
+
+public final class TableMetadata implements Externalizable, Message<TableMetadata>, Schema<TableMetadata>
+{
+
+ public static Schema<TableMetadata> getSchema()
+ {
+ return DEFAULT_INSTANCE;
+ }
+
+ public static TableMetadata getDefaultInstance()
+ {
+ return DEFAULT_INSTANCE;
+ }
+
+ static final TableMetadata DEFAULT_INSTANCE = new TableMetadata();
+
+
+ private String catalogName;
+ private String schemaName;
+ private String tableName;
+ private String type;
+
+ public TableMetadata()
+ {
+
+ }
+
+ // getters and setters
+
+ // catalogName
+
+ public String getCatalogName()
+ {
+ return catalogName;
+ }
+
+ public TableMetadata setCatalogName(String catalogName)
+ {
+ this.catalogName = catalogName;
+ return this;
+ }
+
+ // schemaName
+
+ public String getSchemaName()
+ {
+ return schemaName;
+ }
+
+ public TableMetadata setSchemaName(String schemaName)
+ {
+ this.schemaName = schemaName;
+ return this;
+ }
+
+ // tableName
+
+ public String getTableName()
+ {
+ return tableName;
+ }
+
+ public TableMetadata setTableName(String tableName)
+ {
+ this.tableName = tableName;
+ return this;
+ }
+
+ // type
+
+ public String getType()
+ {
+ return type;
+ }
+
+ public TableMetadata setType(String type)
+ {
+ this.type = type;
+ return this;
+ }
+
+ // java serialization
+
+ public void readExternal(ObjectInput in) throws IOException
+ {
+ GraphIOUtil.mergeDelimitedFrom(in, this, this);
+ }
+
+ public void writeExternal(ObjectOutput out) throws IOException
+ {
+ GraphIOUtil.writeDelimitedTo(out, this, this);
+ }
+
+ // message method
+
+ public Schema<TableMetadata> cachedSchema()
+ {
+ return DEFAULT_INSTANCE;
+ }
+
+ // schema methods
+
+ public TableMetadata newMessage()
+ {
+ return new TableMetadata();
+ }
+
+ public Class<TableMetadata> typeClass()
+ {
+ return TableMetadata.class;
+ }
+
+ public String messageName()
+ {
+ return TableMetadata.class.getSimpleName();
+ }
+
+ public String messageFullName()
+ {
+ return TableMetadata.class.getName();
+ }
+
+ public boolean isInitialized(TableMetadata message)
+ {
+ return true;
+ }
+
+ public void mergeFrom(Input input, TableMetadata message) throws IOException
+ {
+ for(int number = input.readFieldNumber(this);; number = input.readFieldNumber(this))
+ {
+ switch(number)
+ {
+ case 0:
+ return;
+ case 1:
+ message.catalogName = input.readString();
+ break;
+ case 2:
+ message.schemaName = input.readString();
+ break;
+ case 3:
+ message.tableName = input.readString();
+ break;
+ case 4:
+ message.type = input.readString();
+ break;
+ default:
+ input.handleUnknownField(number, this);
+ }
+ }
+ }
+
+
+ public void writeTo(Output output, TableMetadata message) throws IOException
+ {
+ if(message.catalogName != null)
+ output.writeString(1, message.catalogName, false);
+
+ if(message.schemaName != null)
+ output.writeString(2, message.schemaName, false);
+
+ if(message.tableName != null)
+ output.writeString(3, message.tableName, false);
+
+ if(message.type != null)
+ output.writeString(4, message.type, false);
+ }
+
+ public String getFieldName(int number)
+ {
+ switch(number)
+ {
+ case 1: return "catalogName";
+ case 2: return "schemaName";
+ case 3: return "tableName";
+ case 4: return "type";
+ default: return null;
+ }
+ }
+
+ public int getFieldNumber(String name)
+ {
+ final Integer number = __fieldMap.get(name);
+ return number == null ? 0 : number.intValue();
+ }
+
+ private static final java.util.HashMap<String,Integer> __fieldMap = new java.util.HashMap<String,Integer>();
+ static
+ {
+ __fieldMap.put("catalogName", 1);
+ __fieldMap.put("schemaName", 2);
+ __fieldMap.put("tableName", 3);
+ __fieldMap.put("type", 4);
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/drill/blob/ef6e522c/protocol/src/main/protobuf/User.proto
----------------------------------------------------------------------
diff --git a/protocol/src/main/protobuf/User.proto b/protocol/src/main/protobuf/User.proto
index da44955..6af6fcf 100644
--- a/protocol/src/main/protobuf/User.proto
+++ b/protocol/src/main/protobuf/User.proto
@@ -23,11 +23,19 @@ enum RpcType {
REQUEST_RESULTS = 5;
RESUME_PAUSED_QUERY = 11; // user is sending a query resume request to the drillbit
GET_QUERY_PLAN_FRAGMENTS = 12; // to get plan fragments from query
+ GET_CATALOGS = 14; // user is requesting metadata of catalog(s).
+ GET_SCHEMAS = 15; // user is requesting metadata of schema(s)
+ GET_TABLES = 16; // user is requesting metadata of table(s)
+ GET_COLUMNS = 17; // user is requesting metadata of column(s)
// bit to user
QUERY_DATA = 6; // drillbit is sending a query result data batch to the user
QUERY_HANDLE = 7;
- QUERY_PLAN_FRAGMENTS = 13; // return plan fragments
+ QUERY_PLAN_FRAGMENTS = 13; // return plan fragments
+ CATALOGS = 18; // return catalogs metadata in response to GET_CATALOGS
+ SCHEMAS = 19; // return schema metadata in response to GET_SCHEMAS
+ TABLES = 20; // return table metadata in response to GET_TABLES
+ COLUMNS = 21; // return column metadata in response to GET_COLUMNS
REQ_META_FUNCTIONS = 8;
RESP_FUNCTION_LIST = 9;
@@ -98,3 +106,142 @@ message BitToUserHandshake {
optional string errorId = 4;
optional string errorMessage = 5;
}
+
+/*
+ * Enum indicating the request status.
+ */
+enum RequestStatus {
+ UNKNOWN = 0;
+ OK = 1;
+ FAILED = 2;
+}
+
+/*
+ * Simple filter which encapsulates the SQL LIKE ... ESCAPE function
+ */
+message LikeFilter {
+ optional string regex = 1; // pattern to match
+ optional string escape = 2; // escape character (if any) present in the pattern
+}
+
+/*
+ * Request message for getting the metadata for catalogs satisfying the given optional filter.
+ */
+message GetCatalogsReq {
+ optional LikeFilter catalog_name_filter = 1;
+}
+
+/*
+ * Message encapsulating metadata for a Catalog.
+ */
+message CatalogMetadata {
+ optional string catalog_name = 1;
+ optional string description = 2;
+ optional string connect = 3;
+}
+
+/*
+ * Response message for GetCatalogsReq.
+ */
+message GetCatalogsResp {
+ optional RequestStatus status = 1;
+ repeated CatalogMetadata catalogs = 2;
+ optional exec.shared.DrillPBError error = 3;
+}
+
+/*
+ * Request message for getting the metadata for schemas satisfying the given optional filters.
+ */
+message GetSchemasReq {
+ optional LikeFilter catalog_name_filter = 1;
+ optional LikeFilter schema_name_filter = 2;
+}
+
+/*
+ * Message encapsulating metadata for a Schema.
+ */
+message SchemaMetadata {
+ optional string catalog_name = 1;
+ optional string schema_name = 2;
+ optional string owner = 3;
+ optional string type = 4; // Type. Ex. "file", "mongodb", "hive" etc.
+ optional string mutable = 5;
+}
+
+/*
+ * Response message for GetSchemasReq.
+ */
+message GetSchemasResp {
+ optional RequestStatus status = 1;
+ repeated SchemaMetadata schemas = 2;
+ optional exec.shared.DrillPBError error = 3;
+}
+
+/*
+ * Request message for getting the metadata for tables satisfying the given optional filters.
+ */
+message GetTablesReq {
+ optional LikeFilter catalog_name_filter = 1;
+ optional LikeFilter schema_name_filter = 2;
+ optional LikeFilter table_name_filter = 3;
+}
+
+/*
+ * Message encapsulating metadata for a Table.
+ */
+message TableMetadata {
+ optional string catalog_name = 1;
+ optional string schema_name = 2;
+ optional string table_name = 3;
+ optional string type = 4; // Type. Ex. "TABLE", "VIEW" etc.
+}
+
+/*
+ * Response message for GetTablesReq.
+ */
+message GetTablesResp {
+ optional RequestStatus status = 1;
+ repeated TableMetadata tables = 2;
+ optional exec.shared.DrillPBError error = 3;
+}
+
+/*
+ * Request message for getting the metadata for columns satisfying the given optional filters.
+ */
+message GetColumnsReq {
+ optional LikeFilter catalog_name_filter = 1;
+ optional LikeFilter schema_name_filter = 2;
+ optional LikeFilter table_name_filter = 3;
+ optional LikeFilter column_name_filter = 4;
+}
+
+/*
+ * Message encapsulating metadata for a Column.
+ */
+message ColumnMetadata {
+ optional string catalog_name = 1;
+ optional string schema_name = 2;
+ optional string table_name = 3;
+ optional string column_name = 4;
+ optional int32 ordinal_position = 5;
+ optional string default_value = 6;
+ optional bool is_nullable = 7;
+ optional string data_type = 8;
+ optional int32 char_max_length = 9;
+ optional int32 char_octet_length = 10;
+ optional int32 numeric_precision = 11;
+ optional int32 numeric_precision_radix = 12;
+ optional int32 numeric_scale = 13;
+ optional int32 date_time_precision = 14;
+ optional string interval_type = 15;
+ optional int32 interval_precision = 16;
+}
+
+/*
+ * Response message for GetColumnsReq.
+ */
+message GetColumnsResp {
+ optional RequestStatus status = 1;
+ repeated ColumnMetadata columns = 2;
+ optional exec.shared.DrillPBError error = 3;
+}
\ No newline at end of file
[4/5] drill git commit: DRILL-4728: Add support for new metadata
fetch APIs
Posted by ve...@apache.org.
http://git-wip-us.apache.org/repos/asf/drill/blob/ef6e522c/protocol/src/main/java/org/apache/drill/exec/proto/SchemaUserProtos.java
----------------------------------------------------------------------
diff --git a/protocol/src/main/java/org/apache/drill/exec/proto/SchemaUserProtos.java b/protocol/src/main/java/org/apache/drill/exec/proto/SchemaUserProtos.java
index 10764be..9c91d1d 100644
--- a/protocol/src/main/java/org/apache/drill/exec/proto/SchemaUserProtos.java
+++ b/protocol/src/main/java/org/apache/drill/exec/proto/SchemaUserProtos.java
@@ -1061,4 +1061,1756 @@ public final class SchemaUserProtos
}
}
+ public static final class LikeFilter
+ {
+ public static final org.apache.drill.exec.proto.SchemaUserProtos.LikeFilter.MessageSchema WRITE =
+ new org.apache.drill.exec.proto.SchemaUserProtos.LikeFilter.MessageSchema();
+ public static final org.apache.drill.exec.proto.SchemaUserProtos.LikeFilter.BuilderSchema MERGE =
+ new org.apache.drill.exec.proto.SchemaUserProtos.LikeFilter.BuilderSchema();
+
+ public static class MessageSchema implements com.dyuproject.protostuff.Schema<org.apache.drill.exec.proto.UserProtos.LikeFilter>
+ {
+ public void writeTo(com.dyuproject.protostuff.Output output, org.apache.drill.exec.proto.UserProtos.LikeFilter message) throws java.io.IOException
+ {
+ if(message.hasRegex())
+ output.writeString(1, message.getRegex(), false);
+ if(message.hasEscape())
+ output.writeString(2, message.getEscape(), false);
+ }
+ public boolean isInitialized(org.apache.drill.exec.proto.UserProtos.LikeFilter message)
+ {
+ return message.isInitialized();
+ }
+ public java.lang.String getFieldName(int number)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.LikeFilter.getFieldName(number);
+ }
+ public int getFieldNumber(java.lang.String name)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.LikeFilter.getFieldNumber(name);
+ }
+ public java.lang.Class<org.apache.drill.exec.proto.UserProtos.LikeFilter> typeClass()
+ {
+ return org.apache.drill.exec.proto.UserProtos.LikeFilter.class;
+ }
+ public java.lang.String messageName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.LikeFilter.class.getSimpleName();
+ }
+ public java.lang.String messageFullName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.LikeFilter.class.getName();
+ }
+ //unused
+ public void mergeFrom(com.dyuproject.protostuff.Input input, org.apache.drill.exec.proto.UserProtos.LikeFilter message) throws java.io.IOException {}
+ public org.apache.drill.exec.proto.UserProtos.LikeFilter newMessage() { return null; }
+ }
+ public static class BuilderSchema implements com.dyuproject.protostuff.Schema<org.apache.drill.exec.proto.UserProtos.LikeFilter.Builder>
+ {
+ public void mergeFrom(com.dyuproject.protostuff.Input input, org.apache.drill.exec.proto.UserProtos.LikeFilter.Builder builder) throws java.io.IOException
+ {
+ for(int number = input.readFieldNumber(this);; number = input.readFieldNumber(this))
+ {
+ switch(number)
+ {
+ case 0:
+ return;
+ case 1:
+ builder.setRegex(input.readString());
+ break;
+ case 2:
+ builder.setEscape(input.readString());
+ break;
+ default:
+ input.handleUnknownField(number, this);
+ }
+ }
+ }
+ public boolean isInitialized(org.apache.drill.exec.proto.UserProtos.LikeFilter.Builder builder)
+ {
+ return builder.isInitialized();
+ }
+ public org.apache.drill.exec.proto.UserProtos.LikeFilter.Builder newMessage()
+ {
+ return org.apache.drill.exec.proto.UserProtos.LikeFilter.newBuilder();
+ }
+ public java.lang.String getFieldName(int number)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.LikeFilter.getFieldName(number);
+ }
+ public int getFieldNumber(java.lang.String name)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.LikeFilter.getFieldNumber(name);
+ }
+ public java.lang.Class<org.apache.drill.exec.proto.UserProtos.LikeFilter.Builder> typeClass()
+ {
+ return org.apache.drill.exec.proto.UserProtos.LikeFilter.Builder.class;
+ }
+ public java.lang.String messageName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.LikeFilter.class.getSimpleName();
+ }
+ public java.lang.String messageFullName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.LikeFilter.class.getName();
+ }
+ //unused
+ public void writeTo(com.dyuproject.protostuff.Output output, org.apache.drill.exec.proto.UserProtos.LikeFilter.Builder builder) throws java.io.IOException {}
+ }
+ public static java.lang.String getFieldName(int number)
+ {
+ switch(number)
+ {
+ case 1: return "regex";
+ case 2: return "escape";
+ default: return null;
+ }
+ }
+ public static int getFieldNumber(java.lang.String name)
+ {
+ java.lang.Integer number = fieldMap.get(name);
+ return number == null ? 0 : number.intValue();
+ }
+ private static final java.util.HashMap<java.lang.String,java.lang.Integer> fieldMap = new java.util.HashMap<java.lang.String,java.lang.Integer>();
+ static
+ {
+ fieldMap.put("regex", 1);
+ fieldMap.put("escape", 2);
+ }
+ }
+
+ public static final class GetCatalogsReq
+ {
+ public static final org.apache.drill.exec.proto.SchemaUserProtos.GetCatalogsReq.MessageSchema WRITE =
+ new org.apache.drill.exec.proto.SchemaUserProtos.GetCatalogsReq.MessageSchema();
+ public static final org.apache.drill.exec.proto.SchemaUserProtos.GetCatalogsReq.BuilderSchema MERGE =
+ new org.apache.drill.exec.proto.SchemaUserProtos.GetCatalogsReq.BuilderSchema();
+
+ public static class MessageSchema implements com.dyuproject.protostuff.Schema<org.apache.drill.exec.proto.UserProtos.GetCatalogsReq>
+ {
+ public void writeTo(com.dyuproject.protostuff.Output output, org.apache.drill.exec.proto.UserProtos.GetCatalogsReq message) throws java.io.IOException
+ {
+ if(message.hasCatalogNameFilter())
+ output.writeObject(1, message.getCatalogNameFilter(), org.apache.drill.exec.proto.SchemaUserProtos.LikeFilter.WRITE, false);
+
+ }
+ public boolean isInitialized(org.apache.drill.exec.proto.UserProtos.GetCatalogsReq message)
+ {
+ return message.isInitialized();
+ }
+ public java.lang.String getFieldName(int number)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.GetCatalogsReq.getFieldName(number);
+ }
+ public int getFieldNumber(java.lang.String name)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.GetCatalogsReq.getFieldNumber(name);
+ }
+ public java.lang.Class<org.apache.drill.exec.proto.UserProtos.GetCatalogsReq> typeClass()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetCatalogsReq.class;
+ }
+ public java.lang.String messageName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetCatalogsReq.class.getSimpleName();
+ }
+ public java.lang.String messageFullName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetCatalogsReq.class.getName();
+ }
+ //unused
+ public void mergeFrom(com.dyuproject.protostuff.Input input, org.apache.drill.exec.proto.UserProtos.GetCatalogsReq message) throws java.io.IOException {}
+ public org.apache.drill.exec.proto.UserProtos.GetCatalogsReq newMessage() { return null; }
+ }
+ public static class BuilderSchema implements com.dyuproject.protostuff.Schema<org.apache.drill.exec.proto.UserProtos.GetCatalogsReq.Builder>
+ {
+ public void mergeFrom(com.dyuproject.protostuff.Input input, org.apache.drill.exec.proto.UserProtos.GetCatalogsReq.Builder builder) throws java.io.IOException
+ {
+ for(int number = input.readFieldNumber(this);; number = input.readFieldNumber(this))
+ {
+ switch(number)
+ {
+ case 0:
+ return;
+ case 1:
+ builder.setCatalogNameFilter(input.mergeObject(org.apache.drill.exec.proto.UserProtos.LikeFilter.newBuilder(), org.apache.drill.exec.proto.SchemaUserProtos.LikeFilter.MERGE));
+
+ break;
+ default:
+ input.handleUnknownField(number, this);
+ }
+ }
+ }
+ public boolean isInitialized(org.apache.drill.exec.proto.UserProtos.GetCatalogsReq.Builder builder)
+ {
+ return builder.isInitialized();
+ }
+ public org.apache.drill.exec.proto.UserProtos.GetCatalogsReq.Builder newMessage()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetCatalogsReq.newBuilder();
+ }
+ public java.lang.String getFieldName(int number)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.GetCatalogsReq.getFieldName(number);
+ }
+ public int getFieldNumber(java.lang.String name)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.GetCatalogsReq.getFieldNumber(name);
+ }
+ public java.lang.Class<org.apache.drill.exec.proto.UserProtos.GetCatalogsReq.Builder> typeClass()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetCatalogsReq.Builder.class;
+ }
+ public java.lang.String messageName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetCatalogsReq.class.getSimpleName();
+ }
+ public java.lang.String messageFullName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetCatalogsReq.class.getName();
+ }
+ //unused
+ public void writeTo(com.dyuproject.protostuff.Output output, org.apache.drill.exec.proto.UserProtos.GetCatalogsReq.Builder builder) throws java.io.IOException {}
+ }
+ public static java.lang.String getFieldName(int number)
+ {
+ switch(number)
+ {
+ case 1: return "catalogNameFilter";
+ default: return null;
+ }
+ }
+ public static int getFieldNumber(java.lang.String name)
+ {
+ java.lang.Integer number = fieldMap.get(name);
+ return number == null ? 0 : number.intValue();
+ }
+ private static final java.util.HashMap<java.lang.String,java.lang.Integer> fieldMap = new java.util.HashMap<java.lang.String,java.lang.Integer>();
+ static
+ {
+ fieldMap.put("catalogNameFilter", 1);
+ }
+ }
+
+ public static final class CatalogMetadata
+ {
+ public static final org.apache.drill.exec.proto.SchemaUserProtos.CatalogMetadata.MessageSchema WRITE =
+ new org.apache.drill.exec.proto.SchemaUserProtos.CatalogMetadata.MessageSchema();
+ public static final org.apache.drill.exec.proto.SchemaUserProtos.CatalogMetadata.BuilderSchema MERGE =
+ new org.apache.drill.exec.proto.SchemaUserProtos.CatalogMetadata.BuilderSchema();
+
+ public static class MessageSchema implements com.dyuproject.protostuff.Schema<org.apache.drill.exec.proto.UserProtos.CatalogMetadata>
+ {
+ public void writeTo(com.dyuproject.protostuff.Output output, org.apache.drill.exec.proto.UserProtos.CatalogMetadata message) throws java.io.IOException
+ {
+ if(message.hasCatalogName())
+ output.writeString(1, message.getCatalogName(), false);
+ if(message.hasDescription())
+ output.writeString(2, message.getDescription(), false);
+ if(message.hasConnect())
+ output.writeString(3, message.getConnect(), false);
+ }
+ public boolean isInitialized(org.apache.drill.exec.proto.UserProtos.CatalogMetadata message)
+ {
+ return message.isInitialized();
+ }
+ public java.lang.String getFieldName(int number)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.CatalogMetadata.getFieldName(number);
+ }
+ public int getFieldNumber(java.lang.String name)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.CatalogMetadata.getFieldNumber(name);
+ }
+ public java.lang.Class<org.apache.drill.exec.proto.UserProtos.CatalogMetadata> typeClass()
+ {
+ return org.apache.drill.exec.proto.UserProtos.CatalogMetadata.class;
+ }
+ public java.lang.String messageName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.CatalogMetadata.class.getSimpleName();
+ }
+ public java.lang.String messageFullName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.CatalogMetadata.class.getName();
+ }
+ //unused
+ public void mergeFrom(com.dyuproject.protostuff.Input input, org.apache.drill.exec.proto.UserProtos.CatalogMetadata message) throws java.io.IOException {}
+ public org.apache.drill.exec.proto.UserProtos.CatalogMetadata newMessage() { return null; }
+ }
+ public static class BuilderSchema implements com.dyuproject.protostuff.Schema<org.apache.drill.exec.proto.UserProtos.CatalogMetadata.Builder>
+ {
+ public void mergeFrom(com.dyuproject.protostuff.Input input, org.apache.drill.exec.proto.UserProtos.CatalogMetadata.Builder builder) throws java.io.IOException
+ {
+ for(int number = input.readFieldNumber(this);; number = input.readFieldNumber(this))
+ {
+ switch(number)
+ {
+ case 0:
+ return;
+ case 1:
+ builder.setCatalogName(input.readString());
+ break;
+ case 2:
+ builder.setDescription(input.readString());
+ break;
+ case 3:
+ builder.setConnect(input.readString());
+ break;
+ default:
+ input.handleUnknownField(number, this);
+ }
+ }
+ }
+ public boolean isInitialized(org.apache.drill.exec.proto.UserProtos.CatalogMetadata.Builder builder)
+ {
+ return builder.isInitialized();
+ }
+ public org.apache.drill.exec.proto.UserProtos.CatalogMetadata.Builder newMessage()
+ {
+ return org.apache.drill.exec.proto.UserProtos.CatalogMetadata.newBuilder();
+ }
+ public java.lang.String getFieldName(int number)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.CatalogMetadata.getFieldName(number);
+ }
+ public int getFieldNumber(java.lang.String name)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.CatalogMetadata.getFieldNumber(name);
+ }
+ public java.lang.Class<org.apache.drill.exec.proto.UserProtos.CatalogMetadata.Builder> typeClass()
+ {
+ return org.apache.drill.exec.proto.UserProtos.CatalogMetadata.Builder.class;
+ }
+ public java.lang.String messageName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.CatalogMetadata.class.getSimpleName();
+ }
+ public java.lang.String messageFullName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.CatalogMetadata.class.getName();
+ }
+ //unused
+ public void writeTo(com.dyuproject.protostuff.Output output, org.apache.drill.exec.proto.UserProtos.CatalogMetadata.Builder builder) throws java.io.IOException {}
+ }
+ public static java.lang.String getFieldName(int number)
+ {
+ switch(number)
+ {
+ case 1: return "catalogName";
+ case 2: return "description";
+ case 3: return "connect";
+ default: return null;
+ }
+ }
+ public static int getFieldNumber(java.lang.String name)
+ {
+ java.lang.Integer number = fieldMap.get(name);
+ return number == null ? 0 : number.intValue();
+ }
+ private static final java.util.HashMap<java.lang.String,java.lang.Integer> fieldMap = new java.util.HashMap<java.lang.String,java.lang.Integer>();
+ static
+ {
+ fieldMap.put("catalogName", 1);
+ fieldMap.put("description", 2);
+ fieldMap.put("connect", 3);
+ }
+ }
+
+ public static final class GetCatalogsResp
+ {
+ public static final org.apache.drill.exec.proto.SchemaUserProtos.GetCatalogsResp.MessageSchema WRITE =
+ new org.apache.drill.exec.proto.SchemaUserProtos.GetCatalogsResp.MessageSchema();
+ public static final org.apache.drill.exec.proto.SchemaUserProtos.GetCatalogsResp.BuilderSchema MERGE =
+ new org.apache.drill.exec.proto.SchemaUserProtos.GetCatalogsResp.BuilderSchema();
+
+ public static class MessageSchema implements com.dyuproject.protostuff.Schema<org.apache.drill.exec.proto.UserProtos.GetCatalogsResp>
+ {
+ public void writeTo(com.dyuproject.protostuff.Output output, org.apache.drill.exec.proto.UserProtos.GetCatalogsResp message) throws java.io.IOException
+ {
+ if(message.hasStatus())
+ output.writeEnum(1, message.getStatus().getNumber(), false);
+ for(org.apache.drill.exec.proto.UserProtos.CatalogMetadata catalogs : message.getCatalogsList())
+ output.writeObject(2, catalogs, org.apache.drill.exec.proto.SchemaUserProtos.CatalogMetadata.WRITE, true);
+
+ if(message.hasError())
+ output.writeObject(3, message.getError(), org.apache.drill.exec.proto.SchemaUserBitShared.DrillPBError.WRITE, false);
+
+ }
+ public boolean isInitialized(org.apache.drill.exec.proto.UserProtos.GetCatalogsResp message)
+ {
+ return message.isInitialized();
+ }
+ public java.lang.String getFieldName(int number)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.GetCatalogsResp.getFieldName(number);
+ }
+ public int getFieldNumber(java.lang.String name)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.GetCatalogsResp.getFieldNumber(name);
+ }
+ public java.lang.Class<org.apache.drill.exec.proto.UserProtos.GetCatalogsResp> typeClass()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetCatalogsResp.class;
+ }
+ public java.lang.String messageName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetCatalogsResp.class.getSimpleName();
+ }
+ public java.lang.String messageFullName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetCatalogsResp.class.getName();
+ }
+ //unused
+ public void mergeFrom(com.dyuproject.protostuff.Input input, org.apache.drill.exec.proto.UserProtos.GetCatalogsResp message) throws java.io.IOException {}
+ public org.apache.drill.exec.proto.UserProtos.GetCatalogsResp newMessage() { return null; }
+ }
+ public static class BuilderSchema implements com.dyuproject.protostuff.Schema<org.apache.drill.exec.proto.UserProtos.GetCatalogsResp.Builder>
+ {
+ public void mergeFrom(com.dyuproject.protostuff.Input input, org.apache.drill.exec.proto.UserProtos.GetCatalogsResp.Builder builder) throws java.io.IOException
+ {
+ for(int number = input.readFieldNumber(this);; number = input.readFieldNumber(this))
+ {
+ switch(number)
+ {
+ case 0:
+ return;
+ case 1:
+ builder.setStatus(org.apache.drill.exec.proto.UserProtos.RequestStatus.valueOf(input.readEnum()));
+ break;
+ case 2:
+ builder.addCatalogs(input.mergeObject(org.apache.drill.exec.proto.UserProtos.CatalogMetadata.newBuilder(), org.apache.drill.exec.proto.SchemaUserProtos.CatalogMetadata.MERGE));
+
+ break;
+ case 3:
+ builder.setError(input.mergeObject(org.apache.drill.exec.proto.UserBitShared.DrillPBError.newBuilder(), org.apache.drill.exec.proto.SchemaUserBitShared.DrillPBError.MERGE));
+
+ break;
+ default:
+ input.handleUnknownField(number, this);
+ }
+ }
+ }
+ public boolean isInitialized(org.apache.drill.exec.proto.UserProtos.GetCatalogsResp.Builder builder)
+ {
+ return builder.isInitialized();
+ }
+ public org.apache.drill.exec.proto.UserProtos.GetCatalogsResp.Builder newMessage()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetCatalogsResp.newBuilder();
+ }
+ public java.lang.String getFieldName(int number)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.GetCatalogsResp.getFieldName(number);
+ }
+ public int getFieldNumber(java.lang.String name)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.GetCatalogsResp.getFieldNumber(name);
+ }
+ public java.lang.Class<org.apache.drill.exec.proto.UserProtos.GetCatalogsResp.Builder> typeClass()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetCatalogsResp.Builder.class;
+ }
+ public java.lang.String messageName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetCatalogsResp.class.getSimpleName();
+ }
+ public java.lang.String messageFullName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetCatalogsResp.class.getName();
+ }
+ //unused
+ public void writeTo(com.dyuproject.protostuff.Output output, org.apache.drill.exec.proto.UserProtos.GetCatalogsResp.Builder builder) throws java.io.IOException {}
+ }
+ public static java.lang.String getFieldName(int number)
+ {
+ switch(number)
+ {
+ case 1: return "status";
+ case 2: return "catalogs";
+ case 3: return "error";
+ default: return null;
+ }
+ }
+ public static int getFieldNumber(java.lang.String name)
+ {
+ java.lang.Integer number = fieldMap.get(name);
+ return number == null ? 0 : number.intValue();
+ }
+ private static final java.util.HashMap<java.lang.String,java.lang.Integer> fieldMap = new java.util.HashMap<java.lang.String,java.lang.Integer>();
+ static
+ {
+ fieldMap.put("status", 1);
+ fieldMap.put("catalogs", 2);
+ fieldMap.put("error", 3);
+ }
+ }
+
+ public static final class GetSchemasReq
+ {
+ public static final org.apache.drill.exec.proto.SchemaUserProtos.GetSchemasReq.MessageSchema WRITE =
+ new org.apache.drill.exec.proto.SchemaUserProtos.GetSchemasReq.MessageSchema();
+ public static final org.apache.drill.exec.proto.SchemaUserProtos.GetSchemasReq.BuilderSchema MERGE =
+ new org.apache.drill.exec.proto.SchemaUserProtos.GetSchemasReq.BuilderSchema();
+
+ public static class MessageSchema implements com.dyuproject.protostuff.Schema<org.apache.drill.exec.proto.UserProtos.GetSchemasReq>
+ {
+ public void writeTo(com.dyuproject.protostuff.Output output, org.apache.drill.exec.proto.UserProtos.GetSchemasReq message) throws java.io.IOException
+ {
+ if(message.hasCatalogNameFilter())
+ output.writeObject(1, message.getCatalogNameFilter(), org.apache.drill.exec.proto.SchemaUserProtos.LikeFilter.WRITE, false);
+
+ if(message.hasSchemaNameFilter())
+ output.writeObject(2, message.getSchemaNameFilter(), org.apache.drill.exec.proto.SchemaUserProtos.LikeFilter.WRITE, false);
+
+ }
+ public boolean isInitialized(org.apache.drill.exec.proto.UserProtos.GetSchemasReq message)
+ {
+ return message.isInitialized();
+ }
+ public java.lang.String getFieldName(int number)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.GetSchemasReq.getFieldName(number);
+ }
+ public int getFieldNumber(java.lang.String name)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.GetSchemasReq.getFieldNumber(name);
+ }
+ public java.lang.Class<org.apache.drill.exec.proto.UserProtos.GetSchemasReq> typeClass()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetSchemasReq.class;
+ }
+ public java.lang.String messageName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetSchemasReq.class.getSimpleName();
+ }
+ public java.lang.String messageFullName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetSchemasReq.class.getName();
+ }
+ //unused
+ public void mergeFrom(com.dyuproject.protostuff.Input input, org.apache.drill.exec.proto.UserProtos.GetSchemasReq message) throws java.io.IOException {}
+ public org.apache.drill.exec.proto.UserProtos.GetSchemasReq newMessage() { return null; }
+ }
+ public static class BuilderSchema implements com.dyuproject.protostuff.Schema<org.apache.drill.exec.proto.UserProtos.GetSchemasReq.Builder>
+ {
+ public void mergeFrom(com.dyuproject.protostuff.Input input, org.apache.drill.exec.proto.UserProtos.GetSchemasReq.Builder builder) throws java.io.IOException
+ {
+ for(int number = input.readFieldNumber(this);; number = input.readFieldNumber(this))
+ {
+ switch(number)
+ {
+ case 0:
+ return;
+ case 1:
+ builder.setCatalogNameFilter(input.mergeObject(org.apache.drill.exec.proto.UserProtos.LikeFilter.newBuilder(), org.apache.drill.exec.proto.SchemaUserProtos.LikeFilter.MERGE));
+
+ break;
+ case 2:
+ builder.setSchameNameFilter(input.mergeObject(org.apache.drill.exec.proto.UserProtos.LikeFilter.newBuilder(), org.apache.drill.exec.proto.SchemaUserProtos.LikeFilter.MERGE));
+
+ break;
+ default:
+ input.handleUnknownField(number, this);
+ }
+ }
+ }
+ public boolean isInitialized(org.apache.drill.exec.proto.UserProtos.GetSchemasReq.Builder builder)
+ {
+ return builder.isInitialized();
+ }
+ public org.apache.drill.exec.proto.UserProtos.GetSchemasReq.Builder newMessage()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetSchemasReq.newBuilder();
+ }
+ public java.lang.String getFieldName(int number)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.GetSchemasReq.getFieldName(number);
+ }
+ public int getFieldNumber(java.lang.String name)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.GetSchemasReq.getFieldNumber(name);
+ }
+ public java.lang.Class<org.apache.drill.exec.proto.UserProtos.GetSchemasReq.Builder> typeClass()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetSchemasReq.Builder.class;
+ }
+ public java.lang.String messageName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetSchemasReq.class.getSimpleName();
+ }
+ public java.lang.String messageFullName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetSchemasReq.class.getName();
+ }
+ //unused
+ public void writeTo(com.dyuproject.protostuff.Output output, org.apache.drill.exec.proto.UserProtos.GetSchemasReq.Builder builder) throws java.io.IOException {}
+ }
+ public static java.lang.String getFieldName(int number)
+ {
+ switch(number)
+ {
+ case 1: return "catalogNameFilter";
+ case 2: return "schameNameFilter";
+ default: return null;
+ }
+ }
+ public static int getFieldNumber(java.lang.String name)
+ {
+ java.lang.Integer number = fieldMap.get(name);
+ return number == null ? 0 : number.intValue();
+ }
+ private static final java.util.HashMap<java.lang.String,java.lang.Integer> fieldMap = new java.util.HashMap<java.lang.String,java.lang.Integer>();
+ static
+ {
+ fieldMap.put("catalogNameFilter", 1);
+ fieldMap.put("schameNameFilter", 2);
+ }
+ }
+
+ // Protostuff runtime-schema pair for UserProtos.SchemaMetadata.
+ // WRITE serializes the immutable message; MERGE deserializes into its
+ // Builder. Generated code -- edit only comments.
+ public static final class SchemaMetadata
+ {
+ public static final org.apache.drill.exec.proto.SchemaUserProtos.SchemaMetadata.MessageSchema WRITE =
+ new org.apache.drill.exec.proto.SchemaUserProtos.SchemaMetadata.MessageSchema();
+ public static final org.apache.drill.exec.proto.SchemaUserProtos.SchemaMetadata.BuilderSchema MERGE =
+ new org.apache.drill.exec.proto.SchemaUserProtos.SchemaMetadata.BuilderSchema();
+
+ public static class MessageSchema implements com.dyuproject.protostuff.Schema<org.apache.drill.exec.proto.UserProtos.SchemaMetadata>
+ {
+ // Writes each optional string field only when it is present.
+ // Note: "mutable" is carried as a string field, not a boolean.
+ public void writeTo(com.dyuproject.protostuff.Output output, org.apache.drill.exec.proto.UserProtos.SchemaMetadata message) throws java.io.IOException
+ {
+ if(message.hasCatalogName())
+ output.writeString(1, message.getCatalogName(), false);
+ if(message.hasSchemaName())
+ output.writeString(2, message.getSchemaName(), false);
+ if(message.hasOwner())
+ output.writeString(3, message.getOwner(), false);
+ if(message.hasType())
+ output.writeString(4, message.getType(), false);
+ if(message.hasMutable())
+ output.writeString(5, message.getMutable(), false);
+ }
+ public boolean isInitialized(org.apache.drill.exec.proto.UserProtos.SchemaMetadata message)
+ {
+ return message.isInitialized();
+ }
+ public java.lang.String getFieldName(int number)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.SchemaMetadata.getFieldName(number);
+ }
+ public int getFieldNumber(java.lang.String name)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.SchemaMetadata.getFieldNumber(name);
+ }
+ public java.lang.Class<org.apache.drill.exec.proto.UserProtos.SchemaMetadata> typeClass()
+ {
+ return org.apache.drill.exec.proto.UserProtos.SchemaMetadata.class;
+ }
+ public java.lang.String messageName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.SchemaMetadata.class.getSimpleName();
+ }
+ public java.lang.String messageFullName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.SchemaMetadata.class.getName();
+ }
+ //unused
+ public void mergeFrom(com.dyuproject.protostuff.Input input, org.apache.drill.exec.proto.UserProtos.SchemaMetadata message) throws java.io.IOException {}
+ public org.apache.drill.exec.proto.UserProtos.SchemaMetadata newMessage() { return null; }
+ }
+ public static class BuilderSchema implements com.dyuproject.protostuff.Schema<org.apache.drill.exec.proto.UserProtos.SchemaMetadata.Builder>
+ {
+ // Reads field numbers until 0 (end of message), setting the matching
+ // string field on the builder for each known number.
+ public void mergeFrom(com.dyuproject.protostuff.Input input, org.apache.drill.exec.proto.UserProtos.SchemaMetadata.Builder builder) throws java.io.IOException
+ {
+ for(int number = input.readFieldNumber(this);; number = input.readFieldNumber(this))
+ {
+ switch(number)
+ {
+ case 0:
+ return;
+ case 1:
+ builder.setCatalogName(input.readString());
+ break;
+ case 2:
+ builder.setSchemaName(input.readString());
+ break;
+ case 3:
+ builder.setOwner(input.readString());
+ break;
+ case 4:
+ builder.setType(input.readString());
+ break;
+ case 5:
+ builder.setMutable(input.readString());
+ break;
+ default:
+ input.handleUnknownField(number, this);
+ }
+ }
+ }
+ public boolean isInitialized(org.apache.drill.exec.proto.UserProtos.SchemaMetadata.Builder builder)
+ {
+ return builder.isInitialized();
+ }
+ public org.apache.drill.exec.proto.UserProtos.SchemaMetadata.Builder newMessage()
+ {
+ return org.apache.drill.exec.proto.UserProtos.SchemaMetadata.newBuilder();
+ }
+ public java.lang.String getFieldName(int number)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.SchemaMetadata.getFieldName(number);
+ }
+ public int getFieldNumber(java.lang.String name)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.SchemaMetadata.getFieldNumber(name);
+ }
+ public java.lang.Class<org.apache.drill.exec.proto.UserProtos.SchemaMetadata.Builder> typeClass()
+ {
+ return org.apache.drill.exec.proto.UserProtos.SchemaMetadata.Builder.class;
+ }
+ public java.lang.String messageName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.SchemaMetadata.class.getSimpleName();
+ }
+ public java.lang.String messageFullName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.SchemaMetadata.class.getName();
+ }
+ //unused
+ public void writeTo(com.dyuproject.protostuff.Output output, org.apache.drill.exec.proto.UserProtos.SchemaMetadata.Builder builder) throws java.io.IOException {}
+ }
+ public static java.lang.String getFieldName(int number)
+ {
+ switch(number)
+ {
+ case 1: return "catalogName";
+ case 2: return "schemaName";
+ case 3: return "owner";
+ case 4: return "type";
+ case 5: return "mutable";
+ default: return null;
+ }
+ }
+ // Returns the field number for a name, or 0 (the end-of-message
+ // sentinel) when the name is unknown.
+ public static int getFieldNumber(java.lang.String name)
+ {
+ java.lang.Integer number = fieldMap.get(name);
+ return number == null ? 0 : number.intValue();
+ }
+ private static final java.util.HashMap<java.lang.String,java.lang.Integer> fieldMap = new java.util.HashMap<java.lang.String,java.lang.Integer>();
+ static
+ {
+ fieldMap.put("catalogName", 1);
+ fieldMap.put("schemaName", 2);
+ fieldMap.put("owner", 3);
+ fieldMap.put("type", 4);
+ fieldMap.put("mutable", 5);
+ }
+ }
+
+ // Protostuff runtime-schema pair for UserProtos.GetSchemasResp:
+ // a status enum, a repeated list of SchemaMetadata, and an optional
+ // DrillPBError. Generated code -- edit only comments.
+ public static final class GetSchemasResp
+ {
+ public static final org.apache.drill.exec.proto.SchemaUserProtos.GetSchemasResp.MessageSchema WRITE =
+ new org.apache.drill.exec.proto.SchemaUserProtos.GetSchemasResp.MessageSchema();
+ public static final org.apache.drill.exec.proto.SchemaUserProtos.GetSchemasResp.BuilderSchema MERGE =
+ new org.apache.drill.exec.proto.SchemaUserProtos.GetSchemasResp.BuilderSchema();
+
+ public static class MessageSchema implements com.dyuproject.protostuff.Schema<org.apache.drill.exec.proto.UserProtos.GetSchemasResp>
+ {
+ public void writeTo(com.dyuproject.protostuff.Output output, org.apache.drill.exec.proto.UserProtos.GetSchemasResp message) throws java.io.IOException
+ {
+ if(message.hasStatus())
+ output.writeEnum(1, message.getStatus().getNumber(), false);
+ // Repeated field: one writeObject per schema entry ("repeated" flag true).
+ for(org.apache.drill.exec.proto.UserProtos.SchemaMetadata schemas : message.getSchemasList())
+ output.writeObject(2, schemas, org.apache.drill.exec.proto.SchemaUserProtos.SchemaMetadata.WRITE, true);
+
+ if(message.hasError())
+ output.writeObject(3, message.getError(), org.apache.drill.exec.proto.SchemaUserBitShared.DrillPBError.WRITE, false);
+
+ }
+ public boolean isInitialized(org.apache.drill.exec.proto.UserProtos.GetSchemasResp message)
+ {
+ return message.isInitialized();
+ }
+ public java.lang.String getFieldName(int number)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.GetSchemasResp.getFieldName(number);
+ }
+ public int getFieldNumber(java.lang.String name)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.GetSchemasResp.getFieldNumber(name);
+ }
+ public java.lang.Class<org.apache.drill.exec.proto.UserProtos.GetSchemasResp> typeClass()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetSchemasResp.class;
+ }
+ public java.lang.String messageName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetSchemasResp.class.getSimpleName();
+ }
+ public java.lang.String messageFullName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetSchemasResp.class.getName();
+ }
+ //unused
+ public void mergeFrom(com.dyuproject.protostuff.Input input, org.apache.drill.exec.proto.UserProtos.GetSchemasResp message) throws java.io.IOException {}
+ public org.apache.drill.exec.proto.UserProtos.GetSchemasResp newMessage() { return null; }
+ }
+ public static class BuilderSchema implements com.dyuproject.protostuff.Schema<org.apache.drill.exec.proto.UserProtos.GetSchemasResp.Builder>
+ {
+ // Reads field numbers until 0 (end of message); repeated schemas are
+ // appended via addSchemas, one per occurrence of field 2.
+ public void mergeFrom(com.dyuproject.protostuff.Input input, org.apache.drill.exec.proto.UserProtos.GetSchemasResp.Builder builder) throws java.io.IOException
+ {
+ for(int number = input.readFieldNumber(this);; number = input.readFieldNumber(this))
+ {
+ switch(number)
+ {
+ case 0:
+ return;
+ case 1:
+ builder.setStatus(org.apache.drill.exec.proto.UserProtos.RequestStatus.valueOf(input.readEnum()));
+ break;
+ case 2:
+ builder.addSchemas(input.mergeObject(org.apache.drill.exec.proto.UserProtos.SchemaMetadata.newBuilder(), org.apache.drill.exec.proto.SchemaUserProtos.SchemaMetadata.MERGE));
+
+ break;
+ case 3:
+ builder.setError(input.mergeObject(org.apache.drill.exec.proto.UserBitShared.DrillPBError.newBuilder(), org.apache.drill.exec.proto.SchemaUserBitShared.DrillPBError.MERGE));
+
+ break;
+ default:
+ input.handleUnknownField(number, this);
+ }
+ }
+ }
+ public boolean isInitialized(org.apache.drill.exec.proto.UserProtos.GetSchemasResp.Builder builder)
+ {
+ return builder.isInitialized();
+ }
+ public org.apache.drill.exec.proto.UserProtos.GetSchemasResp.Builder newMessage()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetSchemasResp.newBuilder();
+ }
+ public java.lang.String getFieldName(int number)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.GetSchemasResp.getFieldName(number);
+ }
+ public int getFieldNumber(java.lang.String name)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.GetSchemasResp.getFieldNumber(name);
+ }
+ public java.lang.Class<org.apache.drill.exec.proto.UserProtos.GetSchemasResp.Builder> typeClass()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetSchemasResp.Builder.class;
+ }
+ public java.lang.String messageName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetSchemasResp.class.getSimpleName();
+ }
+ public java.lang.String messageFullName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetSchemasResp.class.getName();
+ }
+ //unused
+ public void writeTo(com.dyuproject.protostuff.Output output, org.apache.drill.exec.proto.UserProtos.GetSchemasResp.Builder builder) throws java.io.IOException {}
+ }
+ public static java.lang.String getFieldName(int number)
+ {
+ switch(number)
+ {
+ case 1: return "status";
+ case 2: return "schemas";
+ case 3: return "error";
+ default: return null;
+ }
+ }
+ // Returns the field number for a name, or 0 (the end-of-message
+ // sentinel) when the name is unknown.
+ public static int getFieldNumber(java.lang.String name)
+ {
+ java.lang.Integer number = fieldMap.get(name);
+ return number == null ? 0 : number.intValue();
+ }
+ private static final java.util.HashMap<java.lang.String,java.lang.Integer> fieldMap = new java.util.HashMap<java.lang.String,java.lang.Integer>();
+ static
+ {
+ fieldMap.put("status", 1);
+ fieldMap.put("schemas", 2);
+ fieldMap.put("error", 3);
+ }
+ }
+
+ // Protostuff runtime-schema pair for UserProtos.GetTablesReq: three
+ // optional LIKE filters (catalog, schema, table). Generated code --
+ // edit only comments.
+ public static final class GetTablesReq
+ {
+ public static final org.apache.drill.exec.proto.SchemaUserProtos.GetTablesReq.MessageSchema WRITE =
+ new org.apache.drill.exec.proto.SchemaUserProtos.GetTablesReq.MessageSchema();
+ public static final org.apache.drill.exec.proto.SchemaUserProtos.GetTablesReq.BuilderSchema MERGE =
+ new org.apache.drill.exec.proto.SchemaUserProtos.GetTablesReq.BuilderSchema();
+
+ public static class MessageSchema implements com.dyuproject.protostuff.Schema<org.apache.drill.exec.proto.UserProtos.GetTablesReq>
+ {
+ // Writes only the optional filter fields that are actually set.
+ public void writeTo(com.dyuproject.protostuff.Output output, org.apache.drill.exec.proto.UserProtos.GetTablesReq message) throws java.io.IOException
+ {
+ if(message.hasCatalogNameFilter())
+ output.writeObject(1, message.getCatalogNameFilter(), org.apache.drill.exec.proto.SchemaUserProtos.LikeFilter.WRITE, false);
+
+ // NOTE(review): "Schame" looks like a typo for "Schema", propagated
+ // from the .proto field name by the generator.
+ if(message.hasSchameNameFilter())
+ output.writeObject(2, message.getSchameNameFilter(), org.apache.drill.exec.proto.SchemaUserProtos.LikeFilter.WRITE, false);
+
+ if(message.hasTableNameFilter())
+ output.writeObject(3, message.getTableNameFilter(), org.apache.drill.exec.proto.SchemaUserProtos.LikeFilter.WRITE, false);
+
+ }
+ public boolean isInitialized(org.apache.drill.exec.proto.UserProtos.GetTablesReq message)
+ {
+ return message.isInitialized();
+ }
+ public java.lang.String getFieldName(int number)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.GetTablesReq.getFieldName(number);
+ }
+ public int getFieldNumber(java.lang.String name)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.GetTablesReq.getFieldNumber(name);
+ }
+ public java.lang.Class<org.apache.drill.exec.proto.UserProtos.GetTablesReq> typeClass()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetTablesReq.class;
+ }
+ public java.lang.String messageName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetTablesReq.class.getSimpleName();
+ }
+ public java.lang.String messageFullName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetTablesReq.class.getName();
+ }
+ //unused
+ public void mergeFrom(com.dyuproject.protostuff.Input input, org.apache.drill.exec.proto.UserProtos.GetTablesReq message) throws java.io.IOException {}
+ public org.apache.drill.exec.proto.UserProtos.GetTablesReq newMessage() { return null; }
+ }
+ public static class BuilderSchema implements com.dyuproject.protostuff.Schema<org.apache.drill.exec.proto.UserProtos.GetTablesReq.Builder>
+ {
+ // Reads field numbers until 0 (end of message), merging each known
+ // LikeFilter field into the builder.
+ public void mergeFrom(com.dyuproject.protostuff.Input input, org.apache.drill.exec.proto.UserProtos.GetTablesReq.Builder builder) throws java.io.IOException
+ {
+ for(int number = input.readFieldNumber(this);; number = input.readFieldNumber(this))
+ {
+ switch(number)
+ {
+ case 0:
+ return;
+ case 1:
+ builder.setCatalogNameFilter(input.mergeObject(org.apache.drill.exec.proto.UserProtos.LikeFilter.newBuilder(), org.apache.drill.exec.proto.SchemaUserProtos.LikeFilter.MERGE));
+
+ break;
+ case 2:
+ builder.setSchameNameFilter(input.mergeObject(org.apache.drill.exec.proto.UserProtos.LikeFilter.newBuilder(), org.apache.drill.exec.proto.SchemaUserProtos.LikeFilter.MERGE));
+
+ break;
+ case 3:
+ builder.setTableNameFilter(input.mergeObject(org.apache.drill.exec.proto.UserProtos.LikeFilter.newBuilder(), org.apache.drill.exec.proto.SchemaUserProtos.LikeFilter.MERGE));
+
+ break;
+ default:
+ input.handleUnknownField(number, this);
+ }
+ }
+ }
+ public boolean isInitialized(org.apache.drill.exec.proto.UserProtos.GetTablesReq.Builder builder)
+ {
+ return builder.isInitialized();
+ }
+ public org.apache.drill.exec.proto.UserProtos.GetTablesReq.Builder newMessage()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetTablesReq.newBuilder();
+ }
+ public java.lang.String getFieldName(int number)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.GetTablesReq.getFieldName(number);
+ }
+ public int getFieldNumber(java.lang.String name)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.GetTablesReq.getFieldNumber(name);
+ }
+ public java.lang.Class<org.apache.drill.exec.proto.UserProtos.GetTablesReq.Builder> typeClass()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetTablesReq.Builder.class;
+ }
+ public java.lang.String messageName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetTablesReq.class.getSimpleName();
+ }
+ public java.lang.String messageFullName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetTablesReq.class.getName();
+ }
+ //unused
+ public void writeTo(com.dyuproject.protostuff.Output output, org.apache.drill.exec.proto.UserProtos.GetTablesReq.Builder builder) throws java.io.IOException {}
+ }
+ public static java.lang.String getFieldName(int number)
+ {
+ switch(number)
+ {
+ case 1: return "catalogNameFilter";
+ case 2: return "schameNameFilter";
+ case 3: return "tableNameFilter";
+ default: return null;
+ }
+ }
+ // Returns the field number for a name, or 0 (the end-of-message
+ // sentinel) when the name is unknown.
+ public static int getFieldNumber(java.lang.String name)
+ {
+ java.lang.Integer number = fieldMap.get(name);
+ return number == null ? 0 : number.intValue();
+ }
+ private static final java.util.HashMap<java.lang.String,java.lang.Integer> fieldMap = new java.util.HashMap<java.lang.String,java.lang.Integer>();
+ static
+ {
+ fieldMap.put("catalogNameFilter", 1);
+ fieldMap.put("schameNameFilter", 2);
+ fieldMap.put("tableNameFilter", 3);
+ }
+ }
+
+ // Protostuff runtime-schema pair for UserProtos.TableMetadata: four
+ // optional string fields (catalog, schema, table, type). Generated
+ // code -- edit only comments.
+ public static final class TableMetadata
+ {
+ public static final org.apache.drill.exec.proto.SchemaUserProtos.TableMetadata.MessageSchema WRITE =
+ new org.apache.drill.exec.proto.SchemaUserProtos.TableMetadata.MessageSchema();
+ public static final org.apache.drill.exec.proto.SchemaUserProtos.TableMetadata.BuilderSchema MERGE =
+ new org.apache.drill.exec.proto.SchemaUserProtos.TableMetadata.BuilderSchema();
+
+ public static class MessageSchema implements com.dyuproject.protostuff.Schema<org.apache.drill.exec.proto.UserProtos.TableMetadata>
+ {
+ // Writes each optional string field only when it is present.
+ public void writeTo(com.dyuproject.protostuff.Output output, org.apache.drill.exec.proto.UserProtos.TableMetadata message) throws java.io.IOException
+ {
+ if(message.hasCatalogName())
+ output.writeString(1, message.getCatalogName(), false);
+ if(message.hasSchemaName())
+ output.writeString(2, message.getSchemaName(), false);
+ if(message.hasTableName())
+ output.writeString(3, message.getTableName(), false);
+ if(message.hasType())
+ output.writeString(4, message.getType(), false);
+ }
+ public boolean isInitialized(org.apache.drill.exec.proto.UserProtos.TableMetadata message)
+ {
+ return message.isInitialized();
+ }
+ public java.lang.String getFieldName(int number)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.TableMetadata.getFieldName(number);
+ }
+ public int getFieldNumber(java.lang.String name)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.TableMetadata.getFieldNumber(name);
+ }
+ public java.lang.Class<org.apache.drill.exec.proto.UserProtos.TableMetadata> typeClass()
+ {
+ return org.apache.drill.exec.proto.UserProtos.TableMetadata.class;
+ }
+ public java.lang.String messageName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.TableMetadata.class.getSimpleName();
+ }
+ public java.lang.String messageFullName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.TableMetadata.class.getName();
+ }
+ //unused
+ public void mergeFrom(com.dyuproject.protostuff.Input input, org.apache.drill.exec.proto.UserProtos.TableMetadata message) throws java.io.IOException {}
+ public org.apache.drill.exec.proto.UserProtos.TableMetadata newMessage() { return null; }
+ }
+ public static class BuilderSchema implements com.dyuproject.protostuff.Schema<org.apache.drill.exec.proto.UserProtos.TableMetadata.Builder>
+ {
+ // Reads field numbers until 0 (end of message), setting the matching
+ // string field on the builder for each known number.
+ public void mergeFrom(com.dyuproject.protostuff.Input input, org.apache.drill.exec.proto.UserProtos.TableMetadata.Builder builder) throws java.io.IOException
+ {
+ for(int number = input.readFieldNumber(this);; number = input.readFieldNumber(this))
+ {
+ switch(number)
+ {
+ case 0:
+ return;
+ case 1:
+ builder.setCatalogName(input.readString());
+ break;
+ case 2:
+ builder.setSchemaName(input.readString());
+ break;
+ case 3:
+ builder.setTableName(input.readString());
+ break;
+ case 4:
+ builder.setType(input.readString());
+ break;
+ default:
+ input.handleUnknownField(number, this);
+ }
+ }
+ }
+ public boolean isInitialized(org.apache.drill.exec.proto.UserProtos.TableMetadata.Builder builder)
+ {
+ return builder.isInitialized();
+ }
+ public org.apache.drill.exec.proto.UserProtos.TableMetadata.Builder newMessage()
+ {
+ return org.apache.drill.exec.proto.UserProtos.TableMetadata.newBuilder();
+ }
+ public java.lang.String getFieldName(int number)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.TableMetadata.getFieldName(number);
+ }
+ public int getFieldNumber(java.lang.String name)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.TableMetadata.getFieldNumber(name);
+ }
+ public java.lang.Class<org.apache.drill.exec.proto.UserProtos.TableMetadata.Builder> typeClass()
+ {
+ return org.apache.drill.exec.proto.UserProtos.TableMetadata.Builder.class;
+ }
+ public java.lang.String messageName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.TableMetadata.class.getSimpleName();
+ }
+ public java.lang.String messageFullName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.TableMetadata.class.getName();
+ }
+ //unused
+ public void writeTo(com.dyuproject.protostuff.Output output, org.apache.drill.exec.proto.UserProtos.TableMetadata.Builder builder) throws java.io.IOException {}
+ }
+ public static java.lang.String getFieldName(int number)
+ {
+ switch(number)
+ {
+ case 1: return "catalogName";
+ case 2: return "schemaName";
+ case 3: return "tableName";
+ case 4: return "type";
+ default: return null;
+ }
+ }
+ // Returns the field number for a name, or 0 (the end-of-message
+ // sentinel) when the name is unknown.
+ public static int getFieldNumber(java.lang.String name)
+ {
+ java.lang.Integer number = fieldMap.get(name);
+ return number == null ? 0 : number.intValue();
+ }
+ private static final java.util.HashMap<java.lang.String,java.lang.Integer> fieldMap = new java.util.HashMap<java.lang.String,java.lang.Integer>();
+ static
+ {
+ fieldMap.put("catalogName", 1);
+ fieldMap.put("schemaName", 2);
+ fieldMap.put("tableName", 3);
+ fieldMap.put("type", 4);
+ }
+ }
+
+ // Protostuff runtime-schema pair for UserProtos.GetTablesResp:
+ // a status enum, a repeated list of TableMetadata, and an optional
+ // DrillPBError. Generated code -- edit only comments.
+ public static final class GetTablesResp
+ {
+ public static final org.apache.drill.exec.proto.SchemaUserProtos.GetTablesResp.MessageSchema WRITE =
+ new org.apache.drill.exec.proto.SchemaUserProtos.GetTablesResp.MessageSchema();
+ public static final org.apache.drill.exec.proto.SchemaUserProtos.GetTablesResp.BuilderSchema MERGE =
+ new org.apache.drill.exec.proto.SchemaUserProtos.GetTablesResp.BuilderSchema();
+
+ public static class MessageSchema implements com.dyuproject.protostuff.Schema<org.apache.drill.exec.proto.UserProtos.GetTablesResp>
+ {
+ public void writeTo(com.dyuproject.protostuff.Output output, org.apache.drill.exec.proto.UserProtos.GetTablesResp message) throws java.io.IOException
+ {
+ if(message.hasStatus())
+ output.writeEnum(1, message.getStatus().getNumber(), false);
+ // Repeated field: one writeObject per table entry ("repeated" flag true).
+ for(org.apache.drill.exec.proto.UserProtos.TableMetadata tables : message.getTablesList())
+ output.writeObject(2, tables, org.apache.drill.exec.proto.SchemaUserProtos.TableMetadata.WRITE, true);
+
+ if(message.hasError())
+ output.writeObject(3, message.getError(), org.apache.drill.exec.proto.SchemaUserBitShared.DrillPBError.WRITE, false);
+
+ }
+ public boolean isInitialized(org.apache.drill.exec.proto.UserProtos.GetTablesResp message)
+ {
+ return message.isInitialized();
+ }
+ public java.lang.String getFieldName(int number)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.GetTablesResp.getFieldName(number);
+ }
+ public int getFieldNumber(java.lang.String name)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.GetTablesResp.getFieldNumber(name);
+ }
+ public java.lang.Class<org.apache.drill.exec.proto.UserProtos.GetTablesResp> typeClass()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetTablesResp.class;
+ }
+ public java.lang.String messageName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetTablesResp.class.getSimpleName();
+ }
+ public java.lang.String messageFullName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetTablesResp.class.getName();
+ }
+ //unused
+ public void mergeFrom(com.dyuproject.protostuff.Input input, org.apache.drill.exec.proto.UserProtos.GetTablesResp message) throws java.io.IOException {}
+ public org.apache.drill.exec.proto.UserProtos.GetTablesResp newMessage() { return null; }
+ }
+ public static class BuilderSchema implements com.dyuproject.protostuff.Schema<org.apache.drill.exec.proto.UserProtos.GetTablesResp.Builder>
+ {
+ // Reads field numbers until 0 (end of message); repeated tables are
+ // appended via addTables, one per occurrence of field 2.
+ public void mergeFrom(com.dyuproject.protostuff.Input input, org.apache.drill.exec.proto.UserProtos.GetTablesResp.Builder builder) throws java.io.IOException
+ {
+ for(int number = input.readFieldNumber(this);; number = input.readFieldNumber(this))
+ {
+ switch(number)
+ {
+ case 0:
+ return;
+ case 1:
+ builder.setStatus(org.apache.drill.exec.proto.UserProtos.RequestStatus.valueOf(input.readEnum()));
+ break;
+ case 2:
+ builder.addTables(input.mergeObject(org.apache.drill.exec.proto.UserProtos.TableMetadata.newBuilder(), org.apache.drill.exec.proto.SchemaUserProtos.TableMetadata.MERGE));
+
+ break;
+ case 3:
+ builder.setError(input.mergeObject(org.apache.drill.exec.proto.UserBitShared.DrillPBError.newBuilder(), org.apache.drill.exec.proto.SchemaUserBitShared.DrillPBError.MERGE));
+
+ break;
+ default:
+ input.handleUnknownField(number, this);
+ }
+ }
+ }
+ public boolean isInitialized(org.apache.drill.exec.proto.UserProtos.GetTablesResp.Builder builder)
+ {
+ return builder.isInitialized();
+ }
+ public org.apache.drill.exec.proto.UserProtos.GetTablesResp.Builder newMessage()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetTablesResp.newBuilder();
+ }
+ public java.lang.String getFieldName(int number)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.GetTablesResp.getFieldName(number);
+ }
+ public int getFieldNumber(java.lang.String name)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.GetTablesResp.getFieldNumber(name);
+ }
+ public java.lang.Class<org.apache.drill.exec.proto.UserProtos.GetTablesResp.Builder> typeClass()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetTablesResp.Builder.class;
+ }
+ public java.lang.String messageName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetTablesResp.class.getSimpleName();
+ }
+ public java.lang.String messageFullName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetTablesResp.class.getName();
+ }
+ //unused
+ public void writeTo(com.dyuproject.protostuff.Output output, org.apache.drill.exec.proto.UserProtos.GetTablesResp.Builder builder) throws java.io.IOException {}
+ }
+ public static java.lang.String getFieldName(int number)
+ {
+ switch(number)
+ {
+ case 1: return "status";
+ case 2: return "tables";
+ case 3: return "error";
+ default: return null;
+ }
+ }
+ // Returns the field number for a name, or 0 (the end-of-message
+ // sentinel) when the name is unknown.
+ public static int getFieldNumber(java.lang.String name)
+ {
+ java.lang.Integer number = fieldMap.get(name);
+ return number == null ? 0 : number.intValue();
+ }
+ private static final java.util.HashMap<java.lang.String,java.lang.Integer> fieldMap = new java.util.HashMap<java.lang.String,java.lang.Integer>();
+ static
+ {
+ fieldMap.put("status", 1);
+ fieldMap.put("tables", 2);
+ fieldMap.put("error", 3);
+ }
+ }
+
+ public static final class GetColumnsReq
+ {
+ public static final org.apache.drill.exec.proto.SchemaUserProtos.GetColumnsReq.MessageSchema WRITE =
+ new org.apache.drill.exec.proto.SchemaUserProtos.GetColumnsReq.MessageSchema();
+ public static final org.apache.drill.exec.proto.SchemaUserProtos.GetColumnsReq.BuilderSchema MERGE =
+ new org.apache.drill.exec.proto.SchemaUserProtos.GetColumnsReq.BuilderSchema();
+
+ public static class MessageSchema implements com.dyuproject.protostuff.Schema<org.apache.drill.exec.proto.UserProtos.GetColumnsReq>
+ {
+ public void writeTo(com.dyuproject.protostuff.Output output, org.apache.drill.exec.proto.UserProtos.GetColumnsReq message) throws java.io.IOException
+ {
+ if(message.hasCatalogNameFilter())
+ output.writeObject(1, message.getCatalogNameFilter(), org.apache.drill.exec.proto.SchemaUserProtos.LikeFilter.WRITE, false);
+
+ if(message.hasSchameNameFilter())
+ output.writeObject(2, message.getSchameNameFilter(), org.apache.drill.exec.proto.SchemaUserProtos.LikeFilter.WRITE, false);
+
+ if(message.hasTableNameFilter())
+ output.writeObject(3, message.getTableNameFilter(), org.apache.drill.exec.proto.SchemaUserProtos.LikeFilter.WRITE, false);
+
+ if(message.hasColumnNameFilter())
+ output.writeObject(4, message.getColumnNameFilter(), org.apache.drill.exec.proto.SchemaUserProtos.LikeFilter.WRITE, false);
+
+ }
+ public boolean isInitialized(org.apache.drill.exec.proto.UserProtos.GetColumnsReq message)
+ {
+ return message.isInitialized();
+ }
+ public java.lang.String getFieldName(int number)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.GetColumnsReq.getFieldName(number);
+ }
+ public int getFieldNumber(java.lang.String name)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.GetColumnsReq.getFieldNumber(name);
+ }
+ public java.lang.Class<org.apache.drill.exec.proto.UserProtos.GetColumnsReq> typeClass()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetColumnsReq.class;
+ }
+ public java.lang.String messageName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetColumnsReq.class.getSimpleName();
+ }
+ public java.lang.String messageFullName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetColumnsReq.class.getName();
+ }
+ //unused
+ public void mergeFrom(com.dyuproject.protostuff.Input input, org.apache.drill.exec.proto.UserProtos.GetColumnsReq message) throws java.io.IOException {}
+ public org.apache.drill.exec.proto.UserProtos.GetColumnsReq newMessage() { return null; }
+ }
+ // Read-side protostuff schema: merges wire data into a GetColumnsReq.Builder.
+ // Field numbers 1-4 must match the GetColumnsReq definition in User.proto.
+ public static class BuilderSchema implements com.dyuproject.protostuff.Schema<org.apache.drill.exec.proto.UserProtos.GetColumnsReq.Builder>
+ {
+ public void mergeFrom(com.dyuproject.protostuff.Input input, org.apache.drill.exec.proto.UserProtos.GetColumnsReq.Builder builder) throws java.io.IOException
+ {
+ for(int number = input.readFieldNumber(this);; number = input.readFieldNumber(this))
+ {
+ switch(number)
+ {
+ case 0:
+ // field number 0 signals end of message
+ return;
+ case 1:
+ builder.setCatalogNameFilter(input.mergeObject(org.apache.drill.exec.proto.UserProtos.LikeFilter.newBuilder(), org.apache.drill.exec.proto.SchemaUserProtos.LikeFilter.MERGE));
+
+ break;
+ case 2:
+ // NOTE(review): "Schame" is a typo inherited from the field name in the .proto;
+ // the generated builder API uses it, so it must stay until the .proto is fixed
+ // and code is regenerated -- confirm against User.proto.
+ builder.setSchameNameFilter(input.mergeObject(org.apache.drill.exec.proto.UserProtos.LikeFilter.newBuilder(), org.apache.drill.exec.proto.SchemaUserProtos.LikeFilter.MERGE));
+
+ break;
+ case 3:
+ builder.setTableNameFilter(input.mergeObject(org.apache.drill.exec.proto.UserProtos.LikeFilter.newBuilder(), org.apache.drill.exec.proto.SchemaUserProtos.LikeFilter.MERGE));
+
+ break;
+ case 4:
+ builder.setColumnNameFilter(input.mergeObject(org.apache.drill.exec.proto.UserProtos.LikeFilter.newBuilder(), org.apache.drill.exec.proto.SchemaUserProtos.LikeFilter.MERGE));
+
+ break;
+ default:
+ // preserve fields this schema does not know about
+ input.handleUnknownField(number, this);
+ }
+ }
+ }
+ public boolean isInitialized(org.apache.drill.exec.proto.UserProtos.GetColumnsReq.Builder builder)
+ {
+ return builder.isInitialized();
+ }
+ // Fresh builder instance for each merge.
+ public org.apache.drill.exec.proto.UserProtos.GetColumnsReq.Builder newMessage()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetColumnsReq.newBuilder();
+ }
+ public java.lang.String getFieldName(int number)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.GetColumnsReq.getFieldName(number);
+ }
+ public int getFieldNumber(java.lang.String name)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.GetColumnsReq.getFieldNumber(name);
+ }
+ public java.lang.Class<org.apache.drill.exec.proto.UserProtos.GetColumnsReq.Builder> typeClass()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetColumnsReq.Builder.class;
+ }
+ public java.lang.String messageName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetColumnsReq.class.getSimpleName();
+ }
+ public java.lang.String messageFullName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetColumnsReq.class.getName();
+ }
+ //unused -- this schema is merge-only; serialization goes through MessageSchema (WRITE) instead
+ public void writeTo(com.dyuproject.protostuff.Output output, org.apache.drill.exec.proto.UserProtos.GetColumnsReq.Builder builder) throws java.io.IOException {}
+ }
+ // Field-number -> field-name table for GetColumnsReq (null for unknown numbers).
+ // NOTE(review): "schameNameFilter" spelling is intentional here -- it mirrors the
+ // typo in the generated proto field; keep in sync with fieldMap below.
+ public static java.lang.String getFieldName(int number)
+ {
+ switch(number)
+ {
+ case 1: return "catalogNameFilter";
+ case 2: return "schameNameFilter";
+ case 3: return "tableNameFilter";
+ case 4: return "columnNameFilter";
+ default: return null;
+ }
+ }
+ // Field-name -> field-number lookup; 0 means the name is unknown.
+ public static int getFieldNumber(java.lang.String name)
+ {
+ java.lang.Integer number = fieldMap.get(name);
+ return number == null ? 0 : number.intValue();
+ }
+ // Reverse index backing getFieldNumber(); entries must mirror getFieldName() above.
+ private static final java.util.HashMap<java.lang.String,java.lang.Integer> fieldMap = new java.util.HashMap<java.lang.String,java.lang.Integer>();
+ static
+ {
+ fieldMap.put("catalogNameFilter", 1);
+ fieldMap.put("schameNameFilter", 2);
+ fieldMap.put("tableNameFilter", 3);
+ fieldMap.put("columnNameFilter", 4);
+ }
+ }
+
+ // Protostuff schema pair for UserProtos.ColumnMetadata: WRITE serializes the
+ // immutable message, MERGE deserializes into its Builder. Generated code --
+ // field numbers 1-16 must stay in sync with ColumnMetadata in User.proto.
+ public static final class ColumnMetadata
+ {
+ public static final org.apache.drill.exec.proto.SchemaUserProtos.ColumnMetadata.MessageSchema WRITE =
+ new org.apache.drill.exec.proto.SchemaUserProtos.ColumnMetadata.MessageSchema();
+ public static final org.apache.drill.exec.proto.SchemaUserProtos.ColumnMetadata.BuilderSchema MERGE =
+ new org.apache.drill.exec.proto.SchemaUserProtos.ColumnMetadata.BuilderSchema();
+
+ // Write-side schema: emits only fields whose hazzer reports a value.
+ public static class MessageSchema implements com.dyuproject.protostuff.Schema<org.apache.drill.exec.proto.UserProtos.ColumnMetadata>
+ {
+ public void writeTo(com.dyuproject.protostuff.Output output, org.apache.drill.exec.proto.UserProtos.ColumnMetadata message) throws java.io.IOException
+ {
+ if(message.hasCatalogName())
+ output.writeString(1, message.getCatalogName(), false);
+ if(message.hasSchemaName())
+ output.writeString(2, message.getSchemaName(), false);
+ if(message.hasTableName())
+ output.writeString(3, message.getTableName(), false);
+ if(message.hasColumnName())
+ output.writeString(4, message.getColumnName(), false);
+ if(message.hasOrdinalPosition())
+ output.writeInt32(5, message.getOrdinalPosition(), false);
+ if(message.hasDefaultValue())
+ output.writeString(6, message.getDefaultValue(), false);
+ if(message.hasIsNullable())
+ output.writeBool(7, message.getIsNullable(), false);
+ if(message.hasDataType())
+ output.writeString(8, message.getDataType(), false);
+ if(message.hasCharMaxLength())
+ output.writeInt32(9, message.getCharMaxLength(), false);
+ if(message.hasCharOctetLength())
+ output.writeInt32(10, message.getCharOctetLength(), false);
+ if(message.hasNumericPrecision())
+ output.writeInt32(11, message.getNumericPrecision(), false);
+ if(message.hasNumericPrecisionRadix())
+ output.writeInt32(12, message.getNumericPrecisionRadix(), false);
+ if(message.hasNumericScale())
+ output.writeInt32(13, message.getNumericScale(), false);
+ if(message.hasDateTimePrecision())
+ output.writeInt32(14, message.getDateTimePrecision(), false);
+ if(message.hasIntervalType())
+ output.writeString(15, message.getIntervalType(), false);
+ if(message.hasIntervalPrecision())
+ output.writeInt32(16, message.getIntervalPrecision(), false);
+ }
+ public boolean isInitialized(org.apache.drill.exec.proto.UserProtos.ColumnMetadata message)
+ {
+ return message.isInitialized();
+ }
+ public java.lang.String getFieldName(int number)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.ColumnMetadata.getFieldName(number);
+ }
+ public int getFieldNumber(java.lang.String name)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.ColumnMetadata.getFieldNumber(name);
+ }
+ public java.lang.Class<org.apache.drill.exec.proto.UserProtos.ColumnMetadata> typeClass()
+ {
+ return org.apache.drill.exec.proto.UserProtos.ColumnMetadata.class;
+ }
+ public java.lang.String messageName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.ColumnMetadata.class.getSimpleName();
+ }
+ public java.lang.String messageFullName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.ColumnMetadata.class.getName();
+ }
+ //unused -- this schema is write-only; deserialization goes through BuilderSchema (MERGE) instead
+ public void mergeFrom(com.dyuproject.protostuff.Input input, org.apache.drill.exec.proto.UserProtos.ColumnMetadata message) throws java.io.IOException {}
+ public org.apache.drill.exec.proto.UserProtos.ColumnMetadata newMessage() { return null; }
+ }
+ // Read-side schema: merges wire data into a ColumnMetadata.Builder.
+ public static class BuilderSchema implements com.dyuproject.protostuff.Schema<org.apache.drill.exec.proto.UserProtos.ColumnMetadata.Builder>
+ {
+ public void mergeFrom(com.dyuproject.protostuff.Input input, org.apache.drill.exec.proto.UserProtos.ColumnMetadata.Builder builder) throws java.io.IOException
+ {
+ for(int number = input.readFieldNumber(this);; number = input.readFieldNumber(this))
+ {
+ switch(number)
+ {
+ case 0:
+ // field number 0 signals end of message
+ return;
+ case 1:
+ builder.setCatalogName(input.readString());
+ break;
+ case 2:
+ builder.setSchemaName(input.readString());
+ break;
+ case 3:
+ builder.setTableName(input.readString());
+ break;
+ case 4:
+ builder.setColumnName(input.readString());
+ break;
+ case 5:
+ builder.setOrdinalPosition(input.readInt32());
+ break;
+ case 6:
+ builder.setDefaultValue(input.readString());
+ break;
+ case 7:
+ builder.setIsNullable(input.readBool());
+ break;
+ case 8:
+ builder.setDataType(input.readString());
+ break;
+ case 9:
+ builder.setCharMaxLength(input.readInt32());
+ break;
+ case 10:
+ builder.setCharOctetLength(input.readInt32());
+ break;
+ case 11:
+ builder.setNumericPrecision(input.readInt32());
+ break;
+ case 12:
+ builder.setNumericPrecisionRadix(input.readInt32());
+ break;
+ case 13:
+ builder.setNumericScale(input.readInt32());
+ break;
+ case 14:
+ builder.setDateTimePrecision(input.readInt32());
+ break;
+ case 15:
+ builder.setIntervalType(input.readString());
+ break;
+ case 16:
+ builder.setIntervalPrecision(input.readInt32());
+ break;
+ default:
+ // preserve fields this schema does not know about
+ input.handleUnknownField(number, this);
+ }
+ }
+ }
+ public boolean isInitialized(org.apache.drill.exec.proto.UserProtos.ColumnMetadata.Builder builder)
+ {
+ return builder.isInitialized();
+ }
+ // Fresh builder instance for each merge.
+ public org.apache.drill.exec.proto.UserProtos.ColumnMetadata.Builder newMessage()
+ {
+ return org.apache.drill.exec.proto.UserProtos.ColumnMetadata.newBuilder();
+ }
+ public java.lang.String getFieldName(int number)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.ColumnMetadata.getFieldName(number);
+ }
+ public int getFieldNumber(java.lang.String name)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.ColumnMetadata.getFieldNumber(name);
+ }
+ public java.lang.Class<org.apache.drill.exec.proto.UserProtos.ColumnMetadata.Builder> typeClass()
+ {
+ return org.apache.drill.exec.proto.UserProtos.ColumnMetadata.Builder.class;
+ }
+ public java.lang.String messageName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.ColumnMetadata.class.getSimpleName();
+ }
+ public java.lang.String messageFullName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.ColumnMetadata.class.getName();
+ }
+ //unused -- this schema is merge-only; serialization goes through MessageSchema (WRITE) instead
+ public void writeTo(com.dyuproject.protostuff.Output output, org.apache.drill.exec.proto.UserProtos.ColumnMetadata.Builder builder) throws java.io.IOException {}
+ }
+ // Field-number -> field-name table; must mirror fieldMap below.
+ public static java.lang.String getFieldName(int number)
+ {
+ switch(number)
+ {
+ case 1: return "catalogName";
+ case 2: return "schemaName";
+ case 3: return "tableName";
+ case 4: return "columnName";
+ case 5: return "ordinalPosition";
+ case 6: return "defaultValue";
+ case 7: return "isNullable";
+ case 8: return "dataType";
+ case 9: return "charMaxLength";
+ case 10: return "charOctetLength";
+ case 11: return "numericPrecision";
+ case 12: return "numericPrecisionRadix";
+ case 13: return "numericScale";
+ case 14: return "dateTimePrecision";
+ case 15: return "intervalType";
+ case 16: return "intervalPrecision";
+ default: return null;
+ }
+ }
+ // Field-name -> field-number lookup; 0 means the name is unknown.
+ public static int getFieldNumber(java.lang.String name)
+ {
+ java.lang.Integer number = fieldMap.get(name);
+ return number == null ? 0 : number.intValue();
+ }
+ private static final java.util.HashMap<java.lang.String,java.lang.Integer> fieldMap = new java.util.HashMap<java.lang.String,java.lang.Integer>();
+ static
+ {
+ fieldMap.put("catalogName", 1);
+ fieldMap.put("schemaName", 2);
+ fieldMap.put("tableName", 3);
+ fieldMap.put("columnName", 4);
+ fieldMap.put("ordinalPosition", 5);
+ fieldMap.put("defaultValue", 6);
+ fieldMap.put("isNullable", 7);
+ fieldMap.put("dataType", 8);
+ fieldMap.put("charMaxLength", 9);
+ fieldMap.put("charOctetLength", 10);
+ fieldMap.put("numericPrecision", 11);
+ fieldMap.put("numericPrecisionRadix", 12);
+ fieldMap.put("numericScale", 13);
+ fieldMap.put("dateTimePrecision", 14);
+ fieldMap.put("intervalType", 15);
+ fieldMap.put("intervalPrecision", 16);
+ }
+ }
+
+ public static final class GetColumnsResp
+ {
+ public static final org.apache.drill.exec.proto.SchemaUserProtos.GetColumnsResp.MessageSchema WRITE =
+ new org.apache.drill.exec.proto.SchemaUserProtos.GetColumnsResp.MessageSchema();
+ public static final org.apache.drill.exec.proto.SchemaUserProtos.GetColumnsResp.BuilderSchema MERGE =
+ new org.apache.drill.exec.proto.SchemaUserProtos.GetColumnsResp.BuilderSchema();
+
+ public static class MessageSchema implements com.dyuproject.protostuff.Schema<org.apache.drill.exec.proto.UserProtos.GetColumnsResp>
+ {
+ public void writeTo(com.dyuproject.protostuff.Output output, org.apache.drill.exec.proto.UserProtos.GetColumnsResp message) throws java.io.IOException
+ {
+ if(message.hasStatus())
+ output.writeEnum(1, message.getStatus().getNumber(), false);
+ for(org.apache.drill.exec.proto.UserProtos.ColumnMetadata columns : message.getColumnsList())
+ output.writeObject(2, columns, org.apache.drill.exec.proto.SchemaUserProtos.ColumnMetadata.WRITE, true);
+
+ if(message.hasError())
+ output.writeObject(3, message.getError(), org.apache.drill.exec.proto.SchemaUserBitShared.DrillPBError.WRITE, false);
+
+ }
+ public boolean isInitialized(org.apache.drill.exec.proto.UserProtos.GetColumnsResp message)
+ {
+ return message.isInitialized();
+ }
+ public java.lang.String getFieldName(int number)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.GetColumnsResp.getFieldName(number);
+ }
+ public int getFieldNumber(java.lang.String name)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.GetColumnsResp.getFieldNumber(name);
+ }
+ public java.lang.Class<org.apache.drill.exec.proto.UserProtos.GetColumnsResp> typeClass()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetColumnsResp.class;
+ }
+ public java.lang.String messageName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetColumnsResp.class.getSimpleName();
+ }
+ public java.lang.String messageFullName()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetColumnsResp.class.getName();
+ }
+ //unused
+ public void mergeFrom(com.dyuproject.protostuff.Input input, org.apache.drill.exec.proto.UserProtos.GetColumnsResp message) throws java.io.IOException {}
+ public org.apache.drill.exec.proto.UserProtos.GetColumnsResp newMessage() { return null; }
+ }
+ public static class BuilderSchema implements com.dyuproject.protostuff.Schema<org.apache.drill.exec.proto.UserProtos.GetColumnsResp.Builder>
+ {
+ public void mergeFrom(com.dyuproject.protostuff.Input input, org.apache.drill.exec.proto.UserProtos.GetColumnsResp.Builder builder) throws java.io.IOException
+ {
+ for(int number = input.readFieldNumber(this);; number = input.readFieldNumber(this))
+ {
+ switch(number)
+ {
+ case 0:
+ return;
+ case 1:
+ builder.setStatus(org.apache.drill.exec.proto.UserProtos.RequestStatus.valueOf(input.readEnum()));
+ break;
+ case 2:
+ builder.addColumns(input.mergeObject(org.apache.drill.exec.proto.UserProtos.ColumnMetadata.newBuilder(), org.apache.drill.exec.proto.SchemaUserProtos.ColumnMetadata.MERGE));
+
+ break;
+ case 3:
+ builder.setError(input.mergeObject(org.apache.drill.exec.proto.UserBitShared.DrillPBError.newBuilder(), org.apache.drill.exec.proto.SchemaUserBitShared.DrillPBError.MERGE));
+
+ break;
+ default:
+ input.handleUnknownField(number, this);
+ }
+ }
+ }
+ public boolean isInitialized(org.apache.drill.exec.proto.UserProtos.GetColumnsResp.Builder builder)
+ {
+ return builder.isInitialized();
+ }
+ public org.apache.drill.exec.proto.UserProtos.GetColumnsResp.Builder newMessage()
+ {
+ return org.apache.drill.exec.proto.UserProtos.GetColumnsResp.newBuilder();
+ }
+ public java.lang.String getFieldName(int number)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.GetColumnsResp.getFieldName(number);
+ }
+ public int getFieldNumber(java.lang.String name)
+ {
+ return org.apache.drill.exec.proto.SchemaUserProtos.GetColumnsResp.getFieldNumber(n
<TRUNCATED>
[2/5] drill git commit: DRILL-4728: Add support for new metadata
fetch APIs
Posted by ve...@apache.org.
http://git-wip-us.apache.org/repos/asf/drill/blob/ef6e522c/protocol/src/main/java/org/apache/drill/exec/proto/beans/CatalogMetadata.java
----------------------------------------------------------------------
diff --git a/protocol/src/main/java/org/apache/drill/exec/proto/beans/CatalogMetadata.java b/protocol/src/main/java/org/apache/drill/exec/proto/beans/CatalogMetadata.java
new file mode 100644
index 0000000..30af128
--- /dev/null
+++ b/protocol/src/main/java/org/apache/drill/exec/proto/beans/CatalogMetadata.java
@@ -0,0 +1,207 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Generated by http://code.google.com/p/protostuff/ ... DO NOT EDIT!
+// Generated from protobuf
+
+package org.apache.drill.exec.proto.beans;
+
+import java.io.Externalizable;
+import java.io.IOException;
+import java.io.ObjectInput;
+import java.io.ObjectOutput;
+
+import com.dyuproject.protostuff.GraphIOUtil;
+import com.dyuproject.protostuff.Input;
+import com.dyuproject.protostuff.Message;
+import com.dyuproject.protostuff.Output;
+import com.dyuproject.protostuff.Schema;
+
+// Protostuff-generated bean counterpart of the CatalogMetadata proto message.
+// The class is its own Schema (self-describing) and is Externalizable via
+// GraphIOUtil delimited encoding. Generated code -- regenerate rather than hand-edit.
+public final class CatalogMetadata implements Externalizable, Message<CatalogMetadata>, Schema<CatalogMetadata>
+{
+
+ public static Schema<CatalogMetadata> getSchema()
+ {
+ return DEFAULT_INSTANCE;
+ }
+
+ public static CatalogMetadata getDefaultInstance()
+ {
+ return DEFAULT_INSTANCE;
+ }
+
+ // Shared instance doubling as the default message and its schema.
+ static final CatalogMetadata DEFAULT_INSTANCE = new CatalogMetadata();
+
+
+ // All fields are optional; null means unset and is skipped by writeTo().
+ private String catalogName;
+ private String description;
+ private String connect;
+
+ public CatalogMetadata()
+ {
+
+ }
+
+ // getters and setters
+
+ // catalogName
+
+ public String getCatalogName()
+ {
+ return catalogName;
+ }
+
+ public CatalogMetadata setCatalogName(String catalogName)
+ {
+ this.catalogName = catalogName;
+ return this;
+ }
+
+ // description
+
+ public String getDescription()
+ {
+ return description;
+ }
+
+ public CatalogMetadata setDescription(String description)
+ {
+ this.description = description;
+ return this;
+ }
+
+ // connect
+
+ public String getConnect()
+ {
+ return connect;
+ }
+
+ public CatalogMetadata setConnect(String connect)
+ {
+ this.connect = connect;
+ return this;
+ }
+
+ // java serialization
+
+ public void readExternal(ObjectInput in) throws IOException
+ {
+ GraphIOUtil.mergeDelimitedFrom(in, this, this);
+ }
+
+ public void writeExternal(ObjectOutput out) throws IOException
+ {
+ GraphIOUtil.writeDelimitedTo(out, this, this);
+ }
+
+ // message method
+
+ public Schema<CatalogMetadata> cachedSchema()
+ {
+ return DEFAULT_INSTANCE;
+ }
+
+ // schema methods
+
+ public CatalogMetadata newMessage()
+ {
+ return new CatalogMetadata();
+ }
+
+ public Class<CatalogMetadata> typeClass()
+ {
+ return CatalogMetadata.class;
+ }
+
+ public String messageName()
+ {
+ return CatalogMetadata.class.getSimpleName();
+ }
+
+ public String messageFullName()
+ {
+ return CatalogMetadata.class.getName();
+ }
+
+ // Always true: the message has no required fields.
+ public boolean isInitialized(CatalogMetadata message)
+ {
+ return true;
+ }
+
+ // Reads fields 1-3 from the wire; unknown field numbers are preserved, 0 ends the message.
+ public void mergeFrom(Input input, CatalogMetadata message) throws IOException
+ {
+ for(int number = input.readFieldNumber(this);; number = input.readFieldNumber(this))
+ {
+ switch(number)
+ {
+ case 0:
+ return;
+ case 1:
+ message.catalogName = input.readString();
+ break;
+ case 2:
+ message.description = input.readString();
+ break;
+ case 3:
+ message.connect = input.readString();
+ break;
+ default:
+ input.handleUnknownField(number, this);
+ }
+ }
+ }
+
+
+ // Writes only non-null fields; field numbers must match mergeFrom() above.
+ public void writeTo(Output output, CatalogMetadata message) throws IOException
+ {
+ if(message.catalogName != null)
+ output.writeString(1, message.catalogName, false);
+
+ if(message.description != null)
+ output.writeString(2, message.description, false);
+
+ if(message.connect != null)
+ output.writeString(3, message.connect, false);
+ }
+
+ // Field-number -> field-name table; must mirror __fieldMap below.
+ public String getFieldName(int number)
+ {
+ switch(number)
+ {
+ case 1: return "catalogName";
+ case 2: return "description";
+ case 3: return "connect";
+ default: return null;
+ }
+ }
+
+ // Field-name -> field-number lookup; 0 means the name is unknown.
+ public int getFieldNumber(String name)
+ {
+ final Integer number = __fieldMap.get(name);
+ return number == null ? 0 : number.intValue();
+ }
+
+ private static final java.util.HashMap<String,Integer> __fieldMap = new java.util.HashMap<String,Integer>();
+ static
+ {
+ __fieldMap.put("catalogName", 1);
+ __fieldMap.put("description", 2);
+ __fieldMap.put("connect", 3);
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/drill/blob/ef6e522c/protocol/src/main/java/org/apache/drill/exec/proto/beans/ColumnMetadata.java
----------------------------------------------------------------------
diff --git a/protocol/src/main/java/org/apache/drill/exec/proto/beans/ColumnMetadata.java b/protocol/src/main/java/org/apache/drill/exec/proto/beans/ColumnMetadata.java
new file mode 100644
index 0000000..a5e7e5d
--- /dev/null
+++ b/protocol/src/main/java/org/apache/drill/exec/proto/beans/ColumnMetadata.java
@@ -0,0 +1,493 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Generated by http://code.google.com/p/protostuff/ ... DO NOT EDIT!
+// Generated from protobuf
+
+package org.apache.drill.exec.proto.beans;
+
+import java.io.Externalizable;
+import java.io.IOException;
+import java.io.ObjectInput;
+import java.io.ObjectOutput;
+
+import com.dyuproject.protostuff.GraphIOUtil;
+import com.dyuproject.protostuff.Input;
+import com.dyuproject.protostuff.Message;
+import com.dyuproject.protostuff.Output;
+import com.dyuproject.protostuff.Schema;
+
+// Protostuff-generated bean counterpart of the ColumnMetadata proto message
+// (INFORMATION_SCHEMA-style column description). The class is its own Schema and
+// is Externalizable via GraphIOUtil delimited encoding. Generated code -- regenerate
+// rather than hand-edit.
+public final class ColumnMetadata implements Externalizable, Message<ColumnMetadata>, Schema<ColumnMetadata>
+{
+
+ public static Schema<ColumnMetadata> getSchema()
+ {
+ return DEFAULT_INSTANCE;
+ }
+
+ public static ColumnMetadata getDefaultInstance()
+ {
+ return DEFAULT_INSTANCE;
+ }
+
+ // Shared instance doubling as the default message and its schema.
+ static final ColumnMetadata DEFAULT_INSTANCE = new ColumnMetadata();
+
+
+ // Reference-typed fields use null as "unset"; note isNullable is a boxed Boolean
+ // for the same reason. Primitive int fields use 0 as the unset sentinel -- see
+ // the NOTE(review) on writeTo() below.
+ private String catalogName;
+ private String schemaName;
+ private String tableName;
+ private String columnName;
+ private int ordinalPosition;
+ private String defaultValue;
+ private Boolean isNullable;
+ private String dataType;
+ private int charMaxLength;
+ private int charOctetLength;
+ private int numericPrecision;
+ private int numericPrecisionRadix;
+ private int numericScale;
+ private int dateTimePrecision;
+ private String intervalType;
+ private int intervalPrecision;
+
+ public ColumnMetadata()
+ {
+
+ }
+
+ // getters and setters
+
+ // catalogName
+
+ public String getCatalogName()
+ {
+ return catalogName;
+ }
+
+ public ColumnMetadata setCatalogName(String catalogName)
+ {
+ this.catalogName = catalogName;
+ return this;
+ }
+
+ // schemaName
+
+ public String getSchemaName()
+ {
+ return schemaName;
+ }
+
+ public ColumnMetadata setSchemaName(String schemaName)
+ {
+ this.schemaName = schemaName;
+ return this;
+ }
+
+ // tableName
+
+ public String getTableName()
+ {
+ return tableName;
+ }
+
+ public ColumnMetadata setTableName(String tableName)
+ {
+ this.tableName = tableName;
+ return this;
+ }
+
+ // columnName
+
+ public String getColumnName()
+ {
+ return columnName;
+ }
+
+ public ColumnMetadata setColumnName(String columnName)
+ {
+ this.columnName = columnName;
+ return this;
+ }
+
+ // ordinalPosition
+
+ public int getOrdinalPosition()
+ {
+ return ordinalPosition;
+ }
+
+ public ColumnMetadata setOrdinalPosition(int ordinalPosition)
+ {
+ this.ordinalPosition = ordinalPosition;
+ return this;
+ }
+
+ // defaultValue
+
+ public String getDefaultValue()
+ {
+ return defaultValue;
+ }
+
+ public ColumnMetadata setDefaultValue(String defaultValue)
+ {
+ this.defaultValue = defaultValue;
+ return this;
+ }
+
+ // isNullable
+
+ public Boolean getIsNullable()
+ {
+ return isNullable;
+ }
+
+ public ColumnMetadata setIsNullable(Boolean isNullable)
+ {
+ this.isNullable = isNullable;
+ return this;
+ }
+
+ // dataType
+
+ public String getDataType()
+ {
+ return dataType;
+ }
+
+ public ColumnMetadata setDataType(String dataType)
+ {
+ this.dataType = dataType;
+ return this;
+ }
+
+ // charMaxLength
+
+ public int getCharMaxLength()
+ {
+ return charMaxLength;
+ }
+
+ public ColumnMetadata setCharMaxLength(int charMaxLength)
+ {
+ this.charMaxLength = charMaxLength;
+ return this;
+ }
+
+ // charOctetLength
+
+ public int getCharOctetLength()
+ {
+ return charOctetLength;
+ }
+
+ public ColumnMetadata setCharOctetLength(int charOctetLength)
+ {
+ this.charOctetLength = charOctetLength;
+ return this;
+ }
+
+ // numericPrecision
+
+ public int getNumericPrecision()
+ {
+ return numericPrecision;
+ }
+
+ public ColumnMetadata setNumericPrecision(int numericPrecision)
+ {
+ this.numericPrecision = numericPrecision;
+ return this;
+ }
+
+ // numericPrecisionRadix
+
+ public int getNumericPrecisionRadix()
+ {
+ return numericPrecisionRadix;
+ }
+
+ public ColumnMetadata setNumericPrecisionRadix(int numericPrecisionRadix)
+ {
+ this.numericPrecisionRadix = numericPrecisionRadix;
+ return this;
+ }
+
+ // numericScale
+
+ public int getNumericScale()
+ {
+ return numericScale;
+ }
+
+ public ColumnMetadata setNumericScale(int numericScale)
+ {
+ this.numericScale = numericScale;
+ return this;
+ }
+
+ // dateTimePrecision
+
+ public int getDateTimePrecision()
+ {
+ return dateTimePrecision;
+ }
+
+ public ColumnMetadata setDateTimePrecision(int dateTimePrecision)
+ {
+ this.dateTimePrecision = dateTimePrecision;
+ return this;
+ }
+
+ // intervalType
+
+ public String getIntervalType()
+ {
+ return intervalType;
+ }
+
+ public ColumnMetadata setIntervalType(String intervalType)
+ {
+ this.intervalType = intervalType;
+ return this;
+ }
+
+ // intervalPrecision
+
+ public int getIntervalPrecision()
+ {
+ return intervalPrecision;
+ }
+
+ public ColumnMetadata setIntervalPrecision(int intervalPrecision)
+ {
+ this.intervalPrecision = intervalPrecision;
+ return this;
+ }
+
+ // java serialization
+
+ public void readExternal(ObjectInput in) throws IOException
+ {
+ GraphIOUtil.mergeDelimitedFrom(in, this, this);
+ }
+
+ public void writeExternal(ObjectOutput out) throws IOException
+ {
+ GraphIOUtil.writeDelimitedTo(out, this, this);
+ }
+
+ // message method
+
+ public Schema<ColumnMetadata> cachedSchema()
+ {
+ return DEFAULT_INSTANCE;
+ }
+
+ // schema methods
+
+ public ColumnMetadata newMessage()
+ {
+ return new ColumnMetadata();
+ }
+
+ public Class<ColumnMetadata> typeClass()
+ {
+ return ColumnMetadata.class;
+ }
+
+ public String messageName()
+ {
+ return ColumnMetadata.class.getSimpleName();
+ }
+
+ public String messageFullName()
+ {
+ return ColumnMetadata.class.getName();
+ }
+
+ // Always true: the message has no required fields.
+ public boolean isInitialized(ColumnMetadata message)
+ {
+ return true;
+ }
+
+ // Reads fields 1-16 from the wire; unknown field numbers are preserved, 0 ends the message.
+ public void mergeFrom(Input input, ColumnMetadata message) throws IOException
+ {
+ for(int number = input.readFieldNumber(this);; number = input.readFieldNumber(this))
+ {
+ switch(number)
+ {
+ case 0:
+ return;
+ case 1:
+ message.catalogName = input.readString();
+ break;
+ case 2:
+ message.schemaName = input.readString();
+ break;
+ case 3:
+ message.tableName = input.readString();
+ break;
+ case 4:
+ message.columnName = input.readString();
+ break;
+ case 5:
+ message.ordinalPosition = input.readInt32();
+ break;
+ case 6:
+ message.defaultValue = input.readString();
+ break;
+ case 7:
+ message.isNullable = input.readBool();
+ break;
+ case 8:
+ message.dataType = input.readString();
+ break;
+ case 9:
+ message.charMaxLength = input.readInt32();
+ break;
+ case 10:
+ message.charOctetLength = input.readInt32();
+ break;
+ case 11:
+ message.numericPrecision = input.readInt32();
+ break;
+ case 12:
+ message.numericPrecisionRadix = input.readInt32();
+ break;
+ case 13:
+ message.numericScale = input.readInt32();
+ break;
+ case 14:
+ message.dateTimePrecision = input.readInt32();
+ break;
+ case 15:
+ message.intervalType = input.readString();
+ break;
+ case 16:
+ message.intervalPrecision = input.readInt32();
+ break;
+ default:
+ input.handleUnknownField(number, this);
+ }
+ }
+ }
+
+
+ // Writes only "set" fields. NOTE(review): int fields compare against 0, so a value
+ // explicitly set to 0 (e.g. numericScale 0) is elided from the wire and reads back
+ // as the default 0 -- inherent to this generated bean form; confirm acceptable for
+ // callers before relying on 0 being round-tripped.
+ public void writeTo(Output output, ColumnMetadata message) throws IOException
+ {
+ if(message.catalogName != null)
+ output.writeString(1, message.catalogName, false);
+
+ if(message.schemaName != null)
+ output.writeString(2, message.schemaName, false);
+
+ if(message.tableName != null)
+ output.writeString(3, message.tableName, false);
+
+ if(message.columnName != null)
+ output.writeString(4, message.columnName, false);
+
+ if(message.ordinalPosition != 0)
+ output.writeInt32(5, message.ordinalPosition, false);
+
+ if(message.defaultValue != null)
+ output.writeString(6, message.defaultValue, false);
+
+ if(message.isNullable != null)
+ output.writeBool(7, message.isNullable, false);
+
+ if(message.dataType != null)
+ output.writeString(8, message.dataType, false);
+
+ if(message.charMaxLength != 0)
+ output.writeInt32(9, message.charMaxLength, false);
+
+ if(message.charOctetLength != 0)
+ output.writeInt32(10, message.charOctetLength, false);
+
+ if(message.numericPrecision != 0)
+ output.writeInt32(11, message.numericPrecision, false);
+
+ if(message.numericPrecisionRadix != 0)
+ output.writeInt32(12, message.numericPrecisionRadix, false);
+
+ if(message.numericScale != 0)
+ output.writeInt32(13, message.numericScale, false);
+
+ if(message.dateTimePrecision != 0)
+ output.writeInt32(14, message.dateTimePrecision, false);
+
+ if(message.intervalType != null)
+ output.writeString(15, message.intervalType, false);
+
+ if(message.intervalPrecision != 0)
+ output.writeInt32(16, message.intervalPrecision, false);
+ }
+
+ // Field-number -> field-name table; must mirror __fieldMap below.
+ public String getFieldName(int number)
+ {
+ switch(number)
+ {
+ case 1: return "catalogName";
+ case 2: return "schemaName";
+ case 3: return "tableName";
+ case 4: return "columnName";
+ case 5: return "ordinalPosition";
+ case 6: return "defaultValue";
+ case 7: return "isNullable";
+ case 8: return "dataType";
+ case 9: return "charMaxLength";
+ case 10: return "charOctetLength";
+ case 11: return "numericPrecision";
+ case 12: return "numericPrecisionRadix";
+ case 13: return "numericScale";
+ case 14: return "dateTimePrecision";
+ case 15: return "intervalType";
+ case 16: return "intervalPrecision";
+ default: return null;
+ }
+ }
+
+ // Field-name -> field-number lookup; 0 means the name is unknown.
+ public int getFieldNumber(String name)
+ {
+ final Integer number = __fieldMap.get(name);
+ return number == null ? 0 : number.intValue();
+ }
+
+ private static final java.util.HashMap<String,Integer> __fieldMap = new java.util.HashMap<String,Integer>();
+ static
+ {
+ __fieldMap.put("catalogName", 1);
+ __fieldMap.put("schemaName", 2);
+ __fieldMap.put("tableName", 3);
+ __fieldMap.put("columnName", 4);
+ __fieldMap.put("ordinalPosition", 5);
+ __fieldMap.put("defaultValue", 6);
+ __fieldMap.put("isNullable", 7);
+ __fieldMap.put("dataType", 8);
+ __fieldMap.put("charMaxLength", 9);
+ __fieldMap.put("charOctetLength", 10);
+ __fieldMap.put("numericPrecision", 11);
+ __fieldMap.put("numericPrecisionRadix", 12);
+ __fieldMap.put("numericScale", 13);
+ __fieldMap.put("dateTimePrecision", 14);
+ __fieldMap.put("intervalType", 15);
+ __fieldMap.put("intervalPrecision", 16);
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/drill/blob/ef6e522c/protocol/src/main/java/org/apache/drill/exec/proto/beans/GetCatalogsReq.java
----------------------------------------------------------------------
diff --git a/protocol/src/main/java/org/apache/drill/exec/proto/beans/GetCatalogsReq.java b/protocol/src/main/java/org/apache/drill/exec/proto/beans/GetCatalogsReq.java
new file mode 100644
index 0000000..064cfbd
--- /dev/null
+++ b/protocol/src/main/java/org/apache/drill/exec/proto/beans/GetCatalogsReq.java
@@ -0,0 +1,165 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Generated by http://code.google.com/p/protostuff/ ... DO NOT EDIT!
+// Generated from protobuf
+
+package org.apache.drill.exec.proto.beans;
+
+import java.io.Externalizable;
+import java.io.IOException;
+import java.io.ObjectInput;
+import java.io.ObjectOutput;
+
+import com.dyuproject.protostuff.GraphIOUtil;
+import com.dyuproject.protostuff.Input;
+import com.dyuproject.protostuff.Message;
+import com.dyuproject.protostuff.Output;
+import com.dyuproject.protostuff.Schema;
+
/**
 * Protostuff-generated bean for the {@code GetCatalogsReq} protobuf message:
 * a metadata request for the list of catalogs, optionally constrained by a
 * LIKE-style filter on the catalog name.
 *
 * <p>Generated code — do not hand-edit; regenerate from the .proto instead.
 * Per the protostuff idiom, the class implements its own {@link Schema}, and a
 * single shared instance serves as both default message and schema.
 */
public final class GetCatalogsReq implements Externalizable, Message<GetCatalogsReq>, Schema<GetCatalogsReq>
{

    /** Returns the singleton schema used to (de)serialize this message type. */
    public static Schema<GetCatalogsReq> getSchema()
    {
        return DEFAULT_INSTANCE;
    }

    /** Returns the shared default (empty) instance. */
    public static GetCatalogsReq getDefaultInstance()
    {
        return DEFAULT_INSTANCE;
    }

    static final GetCatalogsReq DEFAULT_INSTANCE = new GetCatalogsReq();


    // Optional field 1: LIKE filter on catalog names; null means "no filter".
    private LikeFilter catalogNameFilter;

    /** No-arg constructor required by protostuff and Externalizable. */
    public GetCatalogsReq()
    {

    }

    // getters and setters

    // catalogNameFilter

    /** Returns the catalog-name filter, or {@code null} if the field is unset. */
    public LikeFilter getCatalogNameFilter()
    {
        return catalogNameFilter;
    }

    /** Sets the catalog-name filter; returns {@code this} for call chaining. */
    public GetCatalogsReq setCatalogNameFilter(LikeFilter catalogNameFilter)
    {
        this.catalogNameFilter = catalogNameFilter;
        return this;
    }

    // java serialization

    /** Java serialization: delegates to protostuff's delimited graph format. */
    public void readExternal(ObjectInput in) throws IOException
    {
        GraphIOUtil.mergeDelimitedFrom(in, this, this);
    }

    public void writeExternal(ObjectOutput out) throws IOException
    {
        GraphIOUtil.writeDelimitedTo(out, this, this);
    }

    // message method

    public Schema<GetCatalogsReq> cachedSchema()
    {
        return DEFAULT_INSTANCE;
    }

    // schema methods

    public GetCatalogsReq newMessage()
    {
        return new GetCatalogsReq();
    }

    public Class<GetCatalogsReq> typeClass()
    {
        return GetCatalogsReq.class;
    }

    public String messageName()
    {
        return GetCatalogsReq.class.getSimpleName();
    }

    public String messageFullName()
    {
        return GetCatalogsReq.class.getName();
    }

    /** All fields are optional, so any instance is considered initialized. */
    public boolean isInitialized(GetCatalogsReq message)
    {
        return true;
    }

    /** Deserializes fields into {@code message}; field number 0 marks end of input. */
    public void mergeFrom(Input input, GetCatalogsReq message) throws IOException
    {
        for(int number = input.readFieldNumber(this);; number = input.readFieldNumber(this))
        {
            switch(number)
            {
                case 0:
                    return;
                case 1:
                    message.catalogNameFilter = input.mergeObject(message.catalogNameFilter, LikeFilter.getSchema());
                    break;

                default:
                    input.handleUnknownField(number, this);
            }
        }
    }


    /** Serializes {@code message}; unset (null) fields are skipped entirely. */
    public void writeTo(Output output, GetCatalogsReq message) throws IOException
    {
        if(message.catalogNameFilter != null)
            output.writeObject(1, message.catalogNameFilter, LikeFilter.getSchema(), false);

    }

    /** Maps a protobuf field number to its JSON/bean field name, or null if unknown. */
    public String getFieldName(int number)
    {
        switch(number)
        {
            case 1: return "catalogNameFilter";
            default: return null;
        }
    }

    /** Reverse lookup of {@link #getFieldName(int)}; 0 means "unknown field". */
    public int getFieldNumber(String name)
    {
        final Integer number = __fieldMap.get(name);
        return number == null ? 0 : number.intValue();
    }

    // Field-name -> field-number map backing getFieldNumber(String).
    private static final java.util.HashMap<String,Integer> __fieldMap = new java.util.HashMap<String,Integer>();
    static
    {
        __fieldMap.put("catalogNameFilter", 1);
    }

}
http://git-wip-us.apache.org/repos/asf/drill/blob/ef6e522c/protocol/src/main/java/org/apache/drill/exec/proto/beans/GetCatalogsResp.java
----------------------------------------------------------------------
diff --git a/protocol/src/main/java/org/apache/drill/exec/proto/beans/GetCatalogsResp.java b/protocol/src/main/java/org/apache/drill/exec/proto/beans/GetCatalogsResp.java
new file mode 100644
index 0000000..d71062d
--- /dev/null
+++ b/protocol/src/main/java/org/apache/drill/exec/proto/beans/GetCatalogsResp.java
@@ -0,0 +1,221 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Generated by http://code.google.com/p/protostuff/ ... DO NOT EDIT!
+// Generated from protobuf
+
+package org.apache.drill.exec.proto.beans;
+
+import java.io.Externalizable;
+import java.io.IOException;
+import java.io.ObjectInput;
+import java.io.ObjectOutput;
+import java.util.ArrayList;
+import java.util.List;
+
+import com.dyuproject.protostuff.GraphIOUtil;
+import com.dyuproject.protostuff.Input;
+import com.dyuproject.protostuff.Message;
+import com.dyuproject.protostuff.Output;
+import com.dyuproject.protostuff.Schema;
+
/**
 * Protostuff-generated bean for the {@code GetCatalogsResp} protobuf message:
 * the server's response to a catalogs-metadata request, carrying a request
 * status, the matching {@link CatalogMetadata} entries, and an optional error.
 *
 * <p>Generated code — do not hand-edit; regenerate from the .proto instead.
 * Per the protostuff idiom, the class implements its own {@link Schema}.
 */
public final class GetCatalogsResp implements Externalizable, Message<GetCatalogsResp>, Schema<GetCatalogsResp>
{

    /** Returns the singleton schema used to (de)serialize this message type. */
    public static Schema<GetCatalogsResp> getSchema()
    {
        return DEFAULT_INSTANCE;
    }

    /** Returns the shared default (empty) instance. */
    public static GetCatalogsResp getDefaultInstance()
    {
        return DEFAULT_INSTANCE;
    }

    static final GetCatalogsResp DEFAULT_INSTANCE = new GetCatalogsResp();


    private RequestStatus status;
    // Repeated field; null (not an empty list) when no catalogs were returned.
    private List<CatalogMetadata> catalogs;
    private DrillPBError error;

    /** No-arg constructor required by protostuff and Externalizable. */
    public GetCatalogsResp()
    {

    }

    // getters and setters

    // status

    /** Returns the request status, defaulting to {@code UNKNOWN} when unset. */
    public RequestStatus getStatus()
    {
        return status == null ? RequestStatus.UNKNOWN : status;
    }

    public GetCatalogsResp setStatus(RequestStatus status)
    {
        this.status = status;
        return this;
    }

    // catalogs

    /** Returns the catalog list, or {@code null} if the repeated field is unset. */
    public List<CatalogMetadata> getCatalogsList()
    {
        return catalogs;
    }

    public GetCatalogsResp setCatalogsList(List<CatalogMetadata> catalogs)
    {
        this.catalogs = catalogs;
        return this;
    }

    // error

    /** Returns the server-side error, or {@code null} if the request succeeded. */
    public DrillPBError getError()
    {
        return error;
    }

    public GetCatalogsResp setError(DrillPBError error)
    {
        this.error = error;
        return this;
    }

    // java serialization

    /** Java serialization: delegates to protostuff's delimited graph format. */
    public void readExternal(ObjectInput in) throws IOException
    {
        GraphIOUtil.mergeDelimitedFrom(in, this, this);
    }

    public void writeExternal(ObjectOutput out) throws IOException
    {
        GraphIOUtil.writeDelimitedTo(out, this, this);
    }

    // message method

    public Schema<GetCatalogsResp> cachedSchema()
    {
        return DEFAULT_INSTANCE;
    }

    // schema methods

    public GetCatalogsResp newMessage()
    {
        return new GetCatalogsResp();
    }

    public Class<GetCatalogsResp> typeClass()
    {
        return GetCatalogsResp.class;
    }

    public String messageName()
    {
        return GetCatalogsResp.class.getSimpleName();
    }

    public String messageFullName()
    {
        return GetCatalogsResp.class.getName();
    }

    /** All fields are optional, so any instance is considered initialized. */
    public boolean isInitialized(GetCatalogsResp message)
    {
        return true;
    }

    /** Deserializes fields into {@code message}; field number 0 marks end of input. */
    public void mergeFrom(Input input, GetCatalogsResp message) throws IOException
    {
        for(int number = input.readFieldNumber(this);; number = input.readFieldNumber(this))
        {
            switch(number)
            {
                case 0:
                    return;
                case 1:
                    message.status = RequestStatus.valueOf(input.readEnum());
                    break;
                case 2:
                    // Repeated field: allocate the backing list lazily on first element.
                    if(message.catalogs == null)
                        message.catalogs = new ArrayList<CatalogMetadata>();
                    message.catalogs.add(input.mergeObject(null, CatalogMetadata.getSchema()));
                    break;

                case 3:
                    message.error = input.mergeObject(message.error, DrillPBError.getSchema());
                    break;

                default:
                    input.handleUnknownField(number, this);
            }
        }
    }


    /** Serializes {@code message}; unset (null) fields are skipped entirely. */
    public void writeTo(Output output, GetCatalogsResp message) throws IOException
    {
        if(message.status != null)
            output.writeEnum(1, message.status.number, false);

        if(message.catalogs != null)
        {
            for(CatalogMetadata catalogs : message.catalogs)
            {
                if(catalogs != null)
                    output.writeObject(2, catalogs, CatalogMetadata.getSchema(), true);
            }
        }


        if(message.error != null)
            output.writeObject(3, message.error, DrillPBError.getSchema(), false);

    }

    /** Maps a protobuf field number to its JSON/bean field name, or null if unknown. */
    public String getFieldName(int number)
    {
        switch(number)
        {
            case 1: return "status";
            case 2: return "catalogs";
            case 3: return "error";
            default: return null;
        }
    }

    /** Reverse lookup of {@link #getFieldName(int)}; 0 means "unknown field". */
    public int getFieldNumber(String name)
    {
        final Integer number = __fieldMap.get(name);
        return number == null ? 0 : number.intValue();
    }

    // Field-name -> field-number map backing getFieldNumber(String).
    private static final java.util.HashMap<String,Integer> __fieldMap = new java.util.HashMap<String,Integer>();
    static
    {
        __fieldMap.put("status", 1);
        __fieldMap.put("catalogs", 2);
        __fieldMap.put("error", 3);
    }

}
http://git-wip-us.apache.org/repos/asf/drill/blob/ef6e522c/protocol/src/main/java/org/apache/drill/exec/proto/beans/GetColumnsReq.java
----------------------------------------------------------------------
diff --git a/protocol/src/main/java/org/apache/drill/exec/proto/beans/GetColumnsReq.java b/protocol/src/main/java/org/apache/drill/exec/proto/beans/GetColumnsReq.java
new file mode 100644
index 0000000..4ace257
--- /dev/null
+++ b/protocol/src/main/java/org/apache/drill/exec/proto/beans/GetColumnsReq.java
@@ -0,0 +1,237 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Generated by http://code.google.com/p/protostuff/ ... DO NOT EDIT!
+// Generated from protobuf
+
+package org.apache.drill.exec.proto.beans;
+
+import java.io.Externalizable;
+import java.io.IOException;
+import java.io.ObjectInput;
+import java.io.ObjectOutput;
+
+import com.dyuproject.protostuff.GraphIOUtil;
+import com.dyuproject.protostuff.Input;
+import com.dyuproject.protostuff.Message;
+import com.dyuproject.protostuff.Output;
+import com.dyuproject.protostuff.Schema;
+
/**
 * Protostuff-generated bean for the {@code GetColumnsReq} protobuf message:
 * a metadata request for columns, optionally constrained by LIKE-style filters
 * on catalog, schema, table, and column names.
 *
 * <p>Generated code — do not hand-edit; regenerate from the .proto instead.
 * NOTE(review): "schame" is a typo for "schema" propagated from the .proto
 * field name; it cannot be corrected here without breaking the generated
 * public API and the serialized field-name map.
 */
public final class GetColumnsReq implements Externalizable, Message<GetColumnsReq>, Schema<GetColumnsReq>
{

    /** Returns the singleton schema used to (de)serialize this message type. */
    public static Schema<GetColumnsReq> getSchema()
    {
        return DEFAULT_INSTANCE;
    }

    /** Returns the shared default (empty) instance. */
    public static GetColumnsReq getDefaultInstance()
    {
        return DEFAULT_INSTANCE;
    }

    static final GetColumnsReq DEFAULT_INSTANCE = new GetColumnsReq();


    // Optional LIKE filters (fields 1-4); null means "no filter" for that level.
    private LikeFilter catalogNameFilter;
    private LikeFilter schameNameFilter;
    private LikeFilter tableNameFilter;
    private LikeFilter columnNameFilter;

    /** No-arg constructor required by protostuff and Externalizable. */
    public GetColumnsReq()
    {

    }

    // getters and setters

    // catalogNameFilter

    /** Returns the catalog-name filter, or {@code null} if unset. */
    public LikeFilter getCatalogNameFilter()
    {
        return catalogNameFilter;
    }

    public GetColumnsReq setCatalogNameFilter(LikeFilter catalogNameFilter)
    {
        this.catalogNameFilter = catalogNameFilter;
        return this;
    }

    // schameNameFilter

    /** Returns the schema-name filter ("schame" typo from .proto), or {@code null} if unset. */
    public LikeFilter getSchameNameFilter()
    {
        return schameNameFilter;
    }

    public GetColumnsReq setSchameNameFilter(LikeFilter schameNameFilter)
    {
        this.schameNameFilter = schameNameFilter;
        return this;
    }

    // tableNameFilter

    /** Returns the table-name filter, or {@code null} if unset. */
    public LikeFilter getTableNameFilter()
    {
        return tableNameFilter;
    }

    public GetColumnsReq setTableNameFilter(LikeFilter tableNameFilter)
    {
        this.tableNameFilter = tableNameFilter;
        return this;
    }

    // columnNameFilter

    /** Returns the column-name filter, or {@code null} if unset. */
    public LikeFilter getColumnNameFilter()
    {
        return columnNameFilter;
    }

    public GetColumnsReq setColumnNameFilter(LikeFilter columnNameFilter)
    {
        this.columnNameFilter = columnNameFilter;
        return this;
    }

    // java serialization

    /** Java serialization: delegates to protostuff's delimited graph format. */
    public void readExternal(ObjectInput in) throws IOException
    {
        GraphIOUtil.mergeDelimitedFrom(in, this, this);
    }

    public void writeExternal(ObjectOutput out) throws IOException
    {
        GraphIOUtil.writeDelimitedTo(out, this, this);
    }

    // message method

    public Schema<GetColumnsReq> cachedSchema()
    {
        return DEFAULT_INSTANCE;
    }

    // schema methods

    public GetColumnsReq newMessage()
    {
        return new GetColumnsReq();
    }

    public Class<GetColumnsReq> typeClass()
    {
        return GetColumnsReq.class;
    }

    public String messageName()
    {
        return GetColumnsReq.class.getSimpleName();
    }

    public String messageFullName()
    {
        return GetColumnsReq.class.getName();
    }

    /** All fields are optional, so any instance is considered initialized. */
    public boolean isInitialized(GetColumnsReq message)
    {
        return true;
    }

    /** Deserializes fields into {@code message}; field number 0 marks end of input. */
    public void mergeFrom(Input input, GetColumnsReq message) throws IOException
    {
        for(int number = input.readFieldNumber(this);; number = input.readFieldNumber(this))
        {
            switch(number)
            {
                case 0:
                    return;
                case 1:
                    message.catalogNameFilter = input.mergeObject(message.catalogNameFilter, LikeFilter.getSchema());
                    break;

                case 2:
                    message.schameNameFilter = input.mergeObject(message.schameNameFilter, LikeFilter.getSchema());
                    break;

                case 3:
                    message.tableNameFilter = input.mergeObject(message.tableNameFilter, LikeFilter.getSchema());
                    break;

                case 4:
                    message.columnNameFilter = input.mergeObject(message.columnNameFilter, LikeFilter.getSchema());
                    break;

                default:
                    input.handleUnknownField(number, this);
            }
        }
    }


    /** Serializes {@code message}; unset (null) fields are skipped entirely. */
    public void writeTo(Output output, GetColumnsReq message) throws IOException
    {
        if(message.catalogNameFilter != null)
            output.writeObject(1, message.catalogNameFilter, LikeFilter.getSchema(), false);


        if(message.schameNameFilter != null)
            output.writeObject(2, message.schameNameFilter, LikeFilter.getSchema(), false);


        if(message.tableNameFilter != null)
            output.writeObject(3, message.tableNameFilter, LikeFilter.getSchema(), false);


        if(message.columnNameFilter != null)
            output.writeObject(4, message.columnNameFilter, LikeFilter.getSchema(), false);

    }

    /** Maps a protobuf field number to its JSON/bean field name, or null if unknown. */
    public String getFieldName(int number)
    {
        switch(number)
        {
            case 1: return "catalogNameFilter";
            case 2: return "schameNameFilter";
            case 3: return "tableNameFilter";
            case 4: return "columnNameFilter";
            default: return null;
        }
    }

    /** Reverse lookup of {@link #getFieldName(int)}; 0 means "unknown field". */
    public int getFieldNumber(String name)
    {
        final Integer number = __fieldMap.get(name);
        return number == null ? 0 : number.intValue();
    }

    // Field-name -> field-number map backing getFieldNumber(String).
    private static final java.util.HashMap<String,Integer> __fieldMap = new java.util.HashMap<String,Integer>();
    static
    {
        __fieldMap.put("catalogNameFilter", 1);
        __fieldMap.put("schameNameFilter", 2);
        __fieldMap.put("tableNameFilter", 3);
        __fieldMap.put("columnNameFilter", 4);
    }

}
http://git-wip-us.apache.org/repos/asf/drill/blob/ef6e522c/protocol/src/main/java/org/apache/drill/exec/proto/beans/GetColumnsResp.java
----------------------------------------------------------------------
diff --git a/protocol/src/main/java/org/apache/drill/exec/proto/beans/GetColumnsResp.java b/protocol/src/main/java/org/apache/drill/exec/proto/beans/GetColumnsResp.java
new file mode 100644
index 0000000..dc22c85
--- /dev/null
+++ b/protocol/src/main/java/org/apache/drill/exec/proto/beans/GetColumnsResp.java
@@ -0,0 +1,221 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Generated by http://code.google.com/p/protostuff/ ... DO NOT EDIT!
+// Generated from protobuf
+
+package org.apache.drill.exec.proto.beans;
+
+import java.io.Externalizable;
+import java.io.IOException;
+import java.io.ObjectInput;
+import java.io.ObjectOutput;
+import java.util.ArrayList;
+import java.util.List;
+
+import com.dyuproject.protostuff.GraphIOUtil;
+import com.dyuproject.protostuff.Input;
+import com.dyuproject.protostuff.Message;
+import com.dyuproject.protostuff.Output;
+import com.dyuproject.protostuff.Schema;
+
/**
 * Protostuff-generated bean for the {@code GetColumnsResp} protobuf message:
 * the server's response to a columns-metadata request, carrying a request
 * status, the matching {@link ColumnMetadata} entries, and an optional error.
 *
 * <p>Generated code — do not hand-edit; regenerate from the .proto instead.
 */
public final class GetColumnsResp implements Externalizable, Message<GetColumnsResp>, Schema<GetColumnsResp>
{

    /** Returns the singleton schema used to (de)serialize this message type. */
    public static Schema<GetColumnsResp> getSchema()
    {
        return DEFAULT_INSTANCE;
    }

    /** Returns the shared default (empty) instance. */
    public static GetColumnsResp getDefaultInstance()
    {
        return DEFAULT_INSTANCE;
    }

    static final GetColumnsResp DEFAULT_INSTANCE = new GetColumnsResp();


    private RequestStatus status;
    // Repeated field; null (not an empty list) when no columns were returned.
    private List<ColumnMetadata> columns;
    private DrillPBError error;

    /** No-arg constructor required by protostuff and Externalizable. */
    public GetColumnsResp()
    {

    }

    // getters and setters

    // status

    /** Returns the request status, defaulting to {@code UNKNOWN} when unset. */
    public RequestStatus getStatus()
    {
        return status == null ? RequestStatus.UNKNOWN : status;
    }

    public GetColumnsResp setStatus(RequestStatus status)
    {
        this.status = status;
        return this;
    }

    // columns

    /** Returns the column list, or {@code null} if the repeated field is unset. */
    public List<ColumnMetadata> getColumnsList()
    {
        return columns;
    }

    public GetColumnsResp setColumnsList(List<ColumnMetadata> columns)
    {
        this.columns = columns;
        return this;
    }

    // error

    /** Returns the server-side error, or {@code null} if the request succeeded. */
    public DrillPBError getError()
    {
        return error;
    }

    public GetColumnsResp setError(DrillPBError error)
    {
        this.error = error;
        return this;
    }

    // java serialization

    /** Java serialization: delegates to protostuff's delimited graph format. */
    public void readExternal(ObjectInput in) throws IOException
    {
        GraphIOUtil.mergeDelimitedFrom(in, this, this);
    }

    public void writeExternal(ObjectOutput out) throws IOException
    {
        GraphIOUtil.writeDelimitedTo(out, this, this);
    }

    // message method

    public Schema<GetColumnsResp> cachedSchema()
    {
        return DEFAULT_INSTANCE;
    }

    // schema methods

    public GetColumnsResp newMessage()
    {
        return new GetColumnsResp();
    }

    public Class<GetColumnsResp> typeClass()
    {
        return GetColumnsResp.class;
    }

    public String messageName()
    {
        return GetColumnsResp.class.getSimpleName();
    }

    public String messageFullName()
    {
        return GetColumnsResp.class.getName();
    }

    /** All fields are optional, so any instance is considered initialized. */
    public boolean isInitialized(GetColumnsResp message)
    {
        return true;
    }

    /** Deserializes fields into {@code message}; field number 0 marks end of input. */
    public void mergeFrom(Input input, GetColumnsResp message) throws IOException
    {
        for(int number = input.readFieldNumber(this);; number = input.readFieldNumber(this))
        {
            switch(number)
            {
                case 0:
                    return;
                case 1:
                    message.status = RequestStatus.valueOf(input.readEnum());
                    break;
                case 2:
                    // Repeated field: allocate the backing list lazily on first element.
                    if(message.columns == null)
                        message.columns = new ArrayList<ColumnMetadata>();
                    message.columns.add(input.mergeObject(null, ColumnMetadata.getSchema()));
                    break;

                case 3:
                    message.error = input.mergeObject(message.error, DrillPBError.getSchema());
                    break;

                default:
                    input.handleUnknownField(number, this);
            }
        }
    }


    /** Serializes {@code message}; unset (null) fields are skipped entirely. */
    public void writeTo(Output output, GetColumnsResp message) throws IOException
    {
        if(message.status != null)
            output.writeEnum(1, message.status.number, false);

        if(message.columns != null)
        {
            for(ColumnMetadata columns : message.columns)
            {
                if(columns != null)
                    output.writeObject(2, columns, ColumnMetadata.getSchema(), true);
            }
        }


        if(message.error != null)
            output.writeObject(3, message.error, DrillPBError.getSchema(), false);

    }

    /** Maps a protobuf field number to its JSON/bean field name, or null if unknown. */
    public String getFieldName(int number)
    {
        switch(number)
        {
            case 1: return "status";
            case 2: return "columns";
            case 3: return "error";
            default: return null;
        }
    }

    /** Reverse lookup of {@link #getFieldName(int)}; 0 means "unknown field". */
    public int getFieldNumber(String name)
    {
        final Integer number = __fieldMap.get(name);
        return number == null ? 0 : number.intValue();
    }

    // Field-name -> field-number map backing getFieldNumber(String).
    private static final java.util.HashMap<String,Integer> __fieldMap = new java.util.HashMap<String,Integer>();
    static
    {
        __fieldMap.put("status", 1);
        __fieldMap.put("columns", 2);
        __fieldMap.put("error", 3);
    }

}
http://git-wip-us.apache.org/repos/asf/drill/blob/ef6e522c/protocol/src/main/java/org/apache/drill/exec/proto/beans/GetSchemasReq.java
----------------------------------------------------------------------
diff --git a/protocol/src/main/java/org/apache/drill/exec/proto/beans/GetSchemasReq.java b/protocol/src/main/java/org/apache/drill/exec/proto/beans/GetSchemasReq.java
new file mode 100644
index 0000000..742b195
--- /dev/null
+++ b/protocol/src/main/java/org/apache/drill/exec/proto/beans/GetSchemasReq.java
@@ -0,0 +1,189 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Generated by http://code.google.com/p/protostuff/ ... DO NOT EDIT!
+// Generated from protobuf
+
+package org.apache.drill.exec.proto.beans;
+
+import java.io.Externalizable;
+import java.io.IOException;
+import java.io.ObjectInput;
+import java.io.ObjectOutput;
+
+import com.dyuproject.protostuff.GraphIOUtil;
+import com.dyuproject.protostuff.Input;
+import com.dyuproject.protostuff.Message;
+import com.dyuproject.protostuff.Output;
+import com.dyuproject.protostuff.Schema;
+
/**
 * Protostuff-generated bean for the {@code GetSchemasReq} protobuf message:
 * a metadata request for schemas, optionally constrained by LIKE-style filters
 * on the catalog and schema names.
 *
 * <p>Generated code — do not hand-edit; regenerate from the .proto instead.
 * NOTE(review): "schame" is a typo for "schema" propagated from the .proto
 * field name; it cannot be corrected here without breaking the generated
 * public API and the serialized field-name map.
 */
public final class GetSchemasReq implements Externalizable, Message<GetSchemasReq>, Schema<GetSchemasReq>
{

    /** Returns the singleton schema used to (de)serialize this message type. */
    public static Schema<GetSchemasReq> getSchema()
    {
        return DEFAULT_INSTANCE;
    }

    /** Returns the shared default (empty) instance. */
    public static GetSchemasReq getDefaultInstance()
    {
        return DEFAULT_INSTANCE;
    }

    static final GetSchemasReq DEFAULT_INSTANCE = new GetSchemasReq();


    // Optional LIKE filters (fields 1-2); null means "no filter" for that level.
    private LikeFilter catalogNameFilter;
    private LikeFilter schameNameFilter;

    /** No-arg constructor required by protostuff and Externalizable. */
    public GetSchemasReq()
    {

    }

    // getters and setters

    // catalogNameFilter

    /** Returns the catalog-name filter, or {@code null} if unset. */
    public LikeFilter getCatalogNameFilter()
    {
        return catalogNameFilter;
    }

    public GetSchemasReq setCatalogNameFilter(LikeFilter catalogNameFilter)
    {
        this.catalogNameFilter = catalogNameFilter;
        return this;
    }

    // schameNameFilter

    /** Returns the schema-name filter ("schame" typo from .proto), or {@code null} if unset. */
    public LikeFilter getSchameNameFilter()
    {
        return schameNameFilter;
    }

    public GetSchemasReq setSchameNameFilter(LikeFilter schameNameFilter)
    {
        this.schameNameFilter = schameNameFilter;
        return this;
    }

    // java serialization

    /** Java serialization: delegates to protostuff's delimited graph format. */
    public void readExternal(ObjectInput in) throws IOException
    {
        GraphIOUtil.mergeDelimitedFrom(in, this, this);
    }

    public void writeExternal(ObjectOutput out) throws IOException
    {
        GraphIOUtil.writeDelimitedTo(out, this, this);
    }

    // message method

    public Schema<GetSchemasReq> cachedSchema()
    {
        return DEFAULT_INSTANCE;
    }

    // schema methods

    public GetSchemasReq newMessage()
    {
        return new GetSchemasReq();
    }

    public Class<GetSchemasReq> typeClass()
    {
        return GetSchemasReq.class;
    }

    public String messageName()
    {
        return GetSchemasReq.class.getSimpleName();
    }

    public String messageFullName()
    {
        return GetSchemasReq.class.getName();
    }

    /** All fields are optional, so any instance is considered initialized. */
    public boolean isInitialized(GetSchemasReq message)
    {
        return true;
    }

    /** Deserializes fields into {@code message}; field number 0 marks end of input. */
    public void mergeFrom(Input input, GetSchemasReq message) throws IOException
    {
        for(int number = input.readFieldNumber(this);; number = input.readFieldNumber(this))
        {
            switch(number)
            {
                case 0:
                    return;
                case 1:
                    message.catalogNameFilter = input.mergeObject(message.catalogNameFilter, LikeFilter.getSchema());
                    break;

                case 2:
                    message.schameNameFilter = input.mergeObject(message.schameNameFilter, LikeFilter.getSchema());
                    break;

                default:
                    input.handleUnknownField(number, this);
            }
        }
    }


    /** Serializes {@code message}; unset (null) fields are skipped entirely. */
    public void writeTo(Output output, GetSchemasReq message) throws IOException
    {
        if(message.catalogNameFilter != null)
            output.writeObject(1, message.catalogNameFilter, LikeFilter.getSchema(), false);


        if(message.schameNameFilter != null)
            output.writeObject(2, message.schameNameFilter, LikeFilter.getSchema(), false);

    }

    /** Maps a protobuf field number to its JSON/bean field name, or null if unknown. */
    public String getFieldName(int number)
    {
        switch(number)
        {
            case 1: return "catalogNameFilter";
            case 2: return "schameNameFilter";
            default: return null;
        }
    }

    /** Reverse lookup of {@link #getFieldName(int)}; 0 means "unknown field". */
    public int getFieldNumber(String name)
    {
        final Integer number = __fieldMap.get(name);
        return number == null ? 0 : number.intValue();
    }

    // Field-name -> field-number map backing getFieldNumber(String).
    private static final java.util.HashMap<String,Integer> __fieldMap = new java.util.HashMap<String,Integer>();
    static
    {
        __fieldMap.put("catalogNameFilter", 1);
        __fieldMap.put("schameNameFilter", 2);
    }

}
http://git-wip-us.apache.org/repos/asf/drill/blob/ef6e522c/protocol/src/main/java/org/apache/drill/exec/proto/beans/GetSchemasResp.java
----------------------------------------------------------------------
diff --git a/protocol/src/main/java/org/apache/drill/exec/proto/beans/GetSchemasResp.java b/protocol/src/main/java/org/apache/drill/exec/proto/beans/GetSchemasResp.java
new file mode 100644
index 0000000..3dc5e84
--- /dev/null
+++ b/protocol/src/main/java/org/apache/drill/exec/proto/beans/GetSchemasResp.java
@@ -0,0 +1,221 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Generated by http://code.google.com/p/protostuff/ ... DO NOT EDIT!
+// Generated from protobuf
+
+package org.apache.drill.exec.proto.beans;
+
+import java.io.Externalizable;
+import java.io.IOException;
+import java.io.ObjectInput;
+import java.io.ObjectOutput;
+import java.util.ArrayList;
+import java.util.List;
+
+import com.dyuproject.protostuff.GraphIOUtil;
+import com.dyuproject.protostuff.Input;
+import com.dyuproject.protostuff.Message;
+import com.dyuproject.protostuff.Output;
+import com.dyuproject.protostuff.Schema;
+
/**
 * Protostuff-generated bean for the {@code GetSchemasResp} protobuf message:
 * the server's response to a schemas-metadata request, carrying a request
 * status, the matching {@link SchemaMetadata} entries, and an optional error.
 *
 * <p>Generated code — do not hand-edit; regenerate from the .proto instead.
 */
public final class GetSchemasResp implements Externalizable, Message<GetSchemasResp>, Schema<GetSchemasResp>
{

    /** Returns the singleton schema used to (de)serialize this message type. */
    public static Schema<GetSchemasResp> getSchema()
    {
        return DEFAULT_INSTANCE;
    }

    /** Returns the shared default (empty) instance. */
    public static GetSchemasResp getDefaultInstance()
    {
        return DEFAULT_INSTANCE;
    }

    static final GetSchemasResp DEFAULT_INSTANCE = new GetSchemasResp();


    private RequestStatus status;
    // Repeated field; null (not an empty list) when no schemas were returned.
    private List<SchemaMetadata> schemas;
    private DrillPBError error;

    /** No-arg constructor required by protostuff and Externalizable. */
    public GetSchemasResp()
    {

    }

    // getters and setters

    // status

    /** Returns the request status, defaulting to {@code UNKNOWN} when unset. */
    public RequestStatus getStatus()
    {
        return status == null ? RequestStatus.UNKNOWN : status;
    }

    public GetSchemasResp setStatus(RequestStatus status)
    {
        this.status = status;
        return this;
    }

    // schemas

    /** Returns the schema list, or {@code null} if the repeated field is unset. */
    public List<SchemaMetadata> getSchemasList()
    {
        return schemas;
    }

    public GetSchemasResp setSchemasList(List<SchemaMetadata> schemas)
    {
        this.schemas = schemas;
        return this;
    }

    // error

    /** Returns the server-side error, or {@code null} if the request succeeded. */
    public DrillPBError getError()
    {
        return error;
    }

    public GetSchemasResp setError(DrillPBError error)
    {
        this.error = error;
        return this;
    }

    // java serialization

    /** Java serialization: delegates to protostuff's delimited graph format. */
    public void readExternal(ObjectInput in) throws IOException
    {
        GraphIOUtil.mergeDelimitedFrom(in, this, this);
    }

    public void writeExternal(ObjectOutput out) throws IOException
    {
        GraphIOUtil.writeDelimitedTo(out, this, this);
    }

    // message method

    public Schema<GetSchemasResp> cachedSchema()
    {
        return DEFAULT_INSTANCE;
    }

    // schema methods

    public GetSchemasResp newMessage()
    {
        return new GetSchemasResp();
    }

    public Class<GetSchemasResp> typeClass()
    {
        return GetSchemasResp.class;
    }

    public String messageName()
    {
        return GetSchemasResp.class.getSimpleName();
    }

    public String messageFullName()
    {
        return GetSchemasResp.class.getName();
    }

    /** All fields are optional, so any instance is considered initialized. */
    public boolean isInitialized(GetSchemasResp message)
    {
        return true;
    }

    /** Deserializes fields into {@code message}; field number 0 marks end of input. */
    public void mergeFrom(Input input, GetSchemasResp message) throws IOException
    {
        for(int number = input.readFieldNumber(this);; number = input.readFieldNumber(this))
        {
            switch(number)
            {
                case 0:
                    return;
                case 1:
                    message.status = RequestStatus.valueOf(input.readEnum());
                    break;
                case 2:
                    // Repeated field: allocate the backing list lazily on first element.
                    if(message.schemas == null)
                        message.schemas = new ArrayList<SchemaMetadata>();
                    message.schemas.add(input.mergeObject(null, SchemaMetadata.getSchema()));
                    break;

                case 3:
                    message.error = input.mergeObject(message.error, DrillPBError.getSchema());
                    break;

                default:
                    input.handleUnknownField(number, this);
            }
        }
    }


    /** Serializes {@code message}; unset (null) fields are skipped entirely. */
    public void writeTo(Output output, GetSchemasResp message) throws IOException
    {
        if(message.status != null)
            output.writeEnum(1, message.status.number, false);

        if(message.schemas != null)
        {
            for(SchemaMetadata schemas : message.schemas)
            {
                if(schemas != null)
                    output.writeObject(2, schemas, SchemaMetadata.getSchema(), true);
            }
        }


        if(message.error != null)
            output.writeObject(3, message.error, DrillPBError.getSchema(), false);

    }

    /** Maps a protobuf field number to its JSON/bean field name, or null if unknown. */
    public String getFieldName(int number)
    {
        switch(number)
        {
            case 1: return "status";
            case 2: return "schemas";
            case 3: return "error";
            default: return null;
        }
    }

    /** Reverse lookup of {@link #getFieldName(int)}; 0 means "unknown field". */
    public int getFieldNumber(String name)
    {
        final Integer number = __fieldMap.get(name);
        return number == null ? 0 : number.intValue();
    }

    // Field-name -> field-number map backing getFieldNumber(String).
    private static final java.util.HashMap<String,Integer> __fieldMap = new java.util.HashMap<String,Integer>();
    static
    {
        __fieldMap.put("status", 1);
        __fieldMap.put("schemas", 2);
        __fieldMap.put("error", 3);
    }

}
http://git-wip-us.apache.org/repos/asf/drill/blob/ef6e522c/protocol/src/main/java/org/apache/drill/exec/proto/beans/GetTablesReq.java
----------------------------------------------------------------------
diff --git a/protocol/src/main/java/org/apache/drill/exec/proto/beans/GetTablesReq.java b/protocol/src/main/java/org/apache/drill/exec/proto/beans/GetTablesReq.java
new file mode 100644
index 0000000..ce9da61
--- /dev/null
+++ b/protocol/src/main/java/org/apache/drill/exec/proto/beans/GetTablesReq.java
@@ -0,0 +1,213 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Generated by http://code.google.com/p/protostuff/ ... DO NOT EDIT!
+// Generated from protobuf
+
+package org.apache.drill.exec.proto.beans;
+
+import java.io.Externalizable;
+import java.io.IOException;
+import java.io.ObjectInput;
+import java.io.ObjectOutput;
+
+import com.dyuproject.protostuff.GraphIOUtil;
+import com.dyuproject.protostuff.Input;
+import com.dyuproject.protostuff.Message;
+import com.dyuproject.protostuff.Output;
+import com.dyuproject.protostuff.Schema;
+
+// Protostuff-generated bean for the GET_TABLES metadata request: carries
+// optional LIKE filters on catalog, schema and table names. Generated from
+// the User.proto definition — do not hand-edit; change the .proto and
+// regenerate instead.
+public final class GetTablesReq implements Externalizable, Message<GetTablesReq>, Schema<GetTablesReq>
+{
+
+ public static Schema<GetTablesReq> getSchema()
+ {
+ return DEFAULT_INSTANCE;
+ }
+
+ public static GetTablesReq getDefaultInstance()
+ {
+ return DEFAULT_INSTANCE;
+ }
+
+ static final GetTablesReq DEFAULT_INSTANCE = new GetTablesReq();
+
+
+ private LikeFilter catalogNameFilter;
+ // NOTE(review): "schameNameFilter" looks like a typo of "schemaNameFilter",
+ // inherited from the .proto field name; fixing it requires changing the
+ // proto definition and regenerating (wire field number 2 is unaffected).
+ private LikeFilter schameNameFilter;
+ private LikeFilter tableNameFilter;
+
+ public GetTablesReq()
+ {
+
+ }
+
+ // getters and setters
+
+ // catalogNameFilter
+
+ public LikeFilter getCatalogNameFilter()
+ {
+ return catalogNameFilter;
+ }
+
+ public GetTablesReq setCatalogNameFilter(LikeFilter catalogNameFilter)
+ {
+ this.catalogNameFilter = catalogNameFilter;
+ return this;
+ }
+
+ // schameNameFilter
+
+ public LikeFilter getSchameNameFilter()
+ {
+ return schameNameFilter;
+ }
+
+ public GetTablesReq setSchameNameFilter(LikeFilter schameNameFilter)
+ {
+ this.schameNameFilter = schameNameFilter;
+ return this;
+ }
+
+ // tableNameFilter
+
+ public LikeFilter getTableNameFilter()
+ {
+ return tableNameFilter;
+ }
+
+ public GetTablesReq setTableNameFilter(LikeFilter tableNameFilter)
+ {
+ this.tableNameFilter = tableNameFilter;
+ return this;
+ }
+
+ // java serialization
+
+ public void readExternal(ObjectInput in) throws IOException
+ {
+ GraphIOUtil.mergeDelimitedFrom(in, this, this);
+ }
+
+ public void writeExternal(ObjectOutput out) throws IOException
+ {
+ GraphIOUtil.writeDelimitedTo(out, this, this);
+ }
+
+ // message method
+
+ public Schema<GetTablesReq> cachedSchema()
+ {
+ return DEFAULT_INSTANCE;
+ }
+
+ // schema methods
+
+ public GetTablesReq newMessage()
+ {
+ return new GetTablesReq();
+ }
+
+ public Class<GetTablesReq> typeClass()
+ {
+ return GetTablesReq.class;
+ }
+
+ public String messageName()
+ {
+ return GetTablesReq.class.getSimpleName();
+ }
+
+ public String messageFullName()
+ {
+ return GetTablesReq.class.getName();
+ }
+
+ public boolean isInitialized(GetTablesReq message)
+ {
+ return true;
+ }
+
+ // Deserializes fields by number until field 0 (end of message) is read;
+ // unknown field numbers are forwarded to the runtime for skipping.
+ public void mergeFrom(Input input, GetTablesReq message) throws IOException
+ {
+ for(int number = input.readFieldNumber(this);; number = input.readFieldNumber(this))
+ {
+ switch(number)
+ {
+ case 0:
+ return;
+ case 1:
+ message.catalogNameFilter = input.mergeObject(message.catalogNameFilter, LikeFilter.getSchema());
+ break;
+
+ case 2:
+ message.schameNameFilter = input.mergeObject(message.schameNameFilter, LikeFilter.getSchema());
+ break;
+
+ case 3:
+ message.tableNameFilter = input.mergeObject(message.tableNameFilter, LikeFilter.getSchema());
+ break;
+
+ default:
+ input.handleUnknownField(number, this);
+ }
+ }
+ }
+
+
+ // Serializes only the non-null (set) filters; all three are optional.
+ public void writeTo(Output output, GetTablesReq message) throws IOException
+ {
+ if(message.catalogNameFilter != null)
+ output.writeObject(1, message.catalogNameFilter, LikeFilter.getSchema(), false);
+
+
+ if(message.schameNameFilter != null)
+ output.writeObject(2, message.schameNameFilter, LikeFilter.getSchema(), false);
+
+
+ if(message.tableNameFilter != null)
+ output.writeObject(3, message.tableNameFilter, LikeFilter.getSchema(), false);
+
+ }
+
+ public String getFieldName(int number)
+ {
+ switch(number)
+ {
+ case 1: return "catalogNameFilter";
+ case 2: return "schameNameFilter";
+ case 3: return "tableNameFilter";
+ default: return null;
+ }
+ }
+
+ public int getFieldNumber(String name)
+ {
+ final Integer number = __fieldMap.get(name);
+ return number == null ? 0 : number.intValue();
+ }
+
+ private static final java.util.HashMap<String,Integer> __fieldMap = new java.util.HashMap<String,Integer>();
+ static
+ {
+ __fieldMap.put("catalogNameFilter", 1);
+ __fieldMap.put("schameNameFilter", 2);
+ __fieldMap.put("tableNameFilter", 3);
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/drill/blob/ef6e522c/protocol/src/main/java/org/apache/drill/exec/proto/beans/GetTablesResp.java
----------------------------------------------------------------------
diff --git a/protocol/src/main/java/org/apache/drill/exec/proto/beans/GetTablesResp.java b/protocol/src/main/java/org/apache/drill/exec/proto/beans/GetTablesResp.java
new file mode 100644
index 0000000..c62ed3c
--- /dev/null
+++ b/protocol/src/main/java/org/apache/drill/exec/proto/beans/GetTablesResp.java
@@ -0,0 +1,221 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Generated by http://code.google.com/p/protostuff/ ... DO NOT EDIT!
+// Generated from protobuf
+
+package org.apache.drill.exec.proto.beans;
+
+import java.io.Externalizable;
+import java.io.IOException;
+import java.io.ObjectInput;
+import java.io.ObjectOutput;
+import java.util.ArrayList;
+import java.util.List;
+
+import com.dyuproject.protostuff.GraphIOUtil;
+import com.dyuproject.protostuff.Input;
+import com.dyuproject.protostuff.Message;
+import com.dyuproject.protostuff.Output;
+import com.dyuproject.protostuff.Schema;
+
+// Protostuff-generated bean for the GET_TABLES response: a request status,
+// the list of matching tables, and an optional error. Generated code — do
+// not hand-edit; change the .proto and regenerate instead.
+public final class GetTablesResp implements Externalizable, Message<GetTablesResp>, Schema<GetTablesResp>
+{
+
+ public static Schema<GetTablesResp> getSchema()
+ {
+ return DEFAULT_INSTANCE;
+ }
+
+ public static GetTablesResp getDefaultInstance()
+ {
+ return DEFAULT_INSTANCE;
+ }
+
+ static final GetTablesResp DEFAULT_INSTANCE = new GetTablesResp();
+
+
+ private RequestStatus status;
+ private List<TableMetadata> tables;
+ private DrillPBError error;
+
+ public GetTablesResp()
+ {
+
+ }
+
+ // getters and setters
+
+ // status
+
+ // Defaults to UNKNOWN when the field was never set, so callers never see null.
+ public RequestStatus getStatus()
+ {
+ return status == null ? RequestStatus.UNKNOWN : status;
+ }
+
+ public GetTablesResp setStatus(RequestStatus status)
+ {
+ this.status = status;
+ return this;
+ }
+
+ // tables
+
+ // May return null when no tables were set/deserialized (repeated field).
+ public List<TableMetadata> getTablesList()
+ {
+ return tables;
+ }
+
+ public GetTablesResp setTablesList(List<TableMetadata> tables)
+ {
+ this.tables = tables;
+ return this;
+ }
+
+ // error
+
+ public DrillPBError getError()
+ {
+ return error;
+ }
+
+ public GetTablesResp setError(DrillPBError error)
+ {
+ this.error = error;
+ return this;
+ }
+
+ // java serialization
+
+ public void readExternal(ObjectInput in) throws IOException
+ {
+ GraphIOUtil.mergeDelimitedFrom(in, this, this);
+ }
+
+ public void writeExternal(ObjectOutput out) throws IOException
+ {
+ GraphIOUtil.writeDelimitedTo(out, this, this);
+ }
+
+ // message method
+
+ public Schema<GetTablesResp> cachedSchema()
+ {
+ return DEFAULT_INSTANCE;
+ }
+
+ // schema methods
+
+ public GetTablesResp newMessage()
+ {
+ return new GetTablesResp();
+ }
+
+ public Class<GetTablesResp> typeClass()
+ {
+ return GetTablesResp.class;
+ }
+
+ public String messageName()
+ {
+ return GetTablesResp.class.getSimpleName();
+ }
+
+ public String messageFullName()
+ {
+ return GetTablesResp.class.getName();
+ }
+
+ public boolean isInitialized(GetTablesResp message)
+ {
+ return true;
+ }
+
+ // Deserializes fields by number until field 0 (end of message); the
+ // repeated 'tables' list is created lazily on the first element.
+ public void mergeFrom(Input input, GetTablesResp message) throws IOException
+ {
+ for(int number = input.readFieldNumber(this);; number = input.readFieldNumber(this))
+ {
+ switch(number)
+ {
+ case 0:
+ return;
+ case 1:
+ message.status = RequestStatus.valueOf(input.readEnum());
+ break;
+ case 2:
+ if(message.tables == null)
+ message.tables = new ArrayList<TableMetadata>();
+ message.tables.add(input.mergeObject(null, TableMetadata.getSchema()));
+ break;
+
+ case 3:
+ message.error = input.mergeObject(message.error, DrillPBError.getSchema());
+ break;
+
+ default:
+ input.handleUnknownField(number, this);
+ }
+ }
+ }
+
+
+ // Serializes only set fields; 'true' on field 2 marks it as repeated.
+ public void writeTo(Output output, GetTablesResp message) throws IOException
+ {
+ if(message.status != null)
+ output.writeEnum(1, message.status.number, false);
+
+ if(message.tables != null)
+ {
+ for(TableMetadata tables : message.tables)
+ {
+ if(tables != null)
+ output.writeObject(2, tables, TableMetadata.getSchema(), true);
+ }
+ }
+
+
+ if(message.error != null)
+ output.writeObject(3, message.error, DrillPBError.getSchema(), false);
+
+ }
+
+ public String getFieldName(int number)
+ {
+ switch(number)
+ {
+ case 1: return "status";
+ case 2: return "tables";
+ case 3: return "error";
+ default: return null;
+ }
+ }
+
+ public int getFieldNumber(String name)
+ {
+ final Integer number = __fieldMap.get(name);
+ return number == null ? 0 : number.intValue();
+ }
+
+ private static final java.util.HashMap<String,Integer> __fieldMap = new java.util.HashMap<String,Integer>();
+ static
+ {
+ __fieldMap.put("status", 1);
+ __fieldMap.put("tables", 2);
+ __fieldMap.put("error", 3);
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/drill/blob/ef6e522c/protocol/src/main/java/org/apache/drill/exec/proto/beans/LikeFilter.java
----------------------------------------------------------------------
diff --git a/protocol/src/main/java/org/apache/drill/exec/proto/beans/LikeFilter.java b/protocol/src/main/java/org/apache/drill/exec/proto/beans/LikeFilter.java
new file mode 100644
index 0000000..88d0716
--- /dev/null
+++ b/protocol/src/main/java/org/apache/drill/exec/proto/beans/LikeFilter.java
@@ -0,0 +1,185 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Generated by http://code.google.com/p/protostuff/ ... DO NOT EDIT!
+// Generated from protobuf
+
+package org.apache.drill.exec.proto.beans;
+
+import java.io.Externalizable;
+import java.io.IOException;
+import java.io.ObjectInput;
+import java.io.ObjectOutput;
+
+import com.dyuproject.protostuff.GraphIOUtil;
+import com.dyuproject.protostuff.Input;
+import com.dyuproject.protostuff.Message;
+import com.dyuproject.protostuff.Output;
+import com.dyuproject.protostuff.Schema;
+
+// Protostuff-generated bean describing a SQL LIKE filter used by the
+// metadata-fetch requests: 'regex' is the LIKE pattern and 'escape' is its
+// escape string. (NOTE(review): despite the field name, "regex" presumably
+// holds a LIKE pattern, not a Java regex — confirm against the .proto docs.)
+// Generated code — do not hand-edit; change the .proto and regenerate.
+public final class LikeFilter implements Externalizable, Message<LikeFilter>, Schema<LikeFilter>
+{
+
+ public static Schema<LikeFilter> getSchema()
+ {
+ return DEFAULT_INSTANCE;
+ }
+
+ public static LikeFilter getDefaultInstance()
+ {
+ return DEFAULT_INSTANCE;
+ }
+
+ static final LikeFilter DEFAULT_INSTANCE = new LikeFilter();
+
+
+ private String regex;
+ private String escape;
+
+ public LikeFilter()
+ {
+
+ }
+
+ // getters and setters
+
+ // regex
+
+ public String getRegex()
+ {
+ return regex;
+ }
+
+ public LikeFilter setRegex(String regex)
+ {
+ this.regex = regex;
+ return this;
+ }
+
+ // escape
+
+ public String getEscape()
+ {
+ return escape;
+ }
+
+ public LikeFilter setEscape(String escape)
+ {
+ this.escape = escape;
+ return this;
+ }
+
+ // java serialization
+
+ public void readExternal(ObjectInput in) throws IOException
+ {
+ GraphIOUtil.mergeDelimitedFrom(in, this, this);
+ }
+
+ public void writeExternal(ObjectOutput out) throws IOException
+ {
+ GraphIOUtil.writeDelimitedTo(out, this, this);
+ }
+
+ // message method
+
+ public Schema<LikeFilter> cachedSchema()
+ {
+ return DEFAULT_INSTANCE;
+ }
+
+ // schema methods
+
+ public LikeFilter newMessage()
+ {
+ return new LikeFilter();
+ }
+
+ public Class<LikeFilter> typeClass()
+ {
+ return LikeFilter.class;
+ }
+
+ public String messageName()
+ {
+ return LikeFilter.class.getSimpleName();
+ }
+
+ public String messageFullName()
+ {
+ return LikeFilter.class.getName();
+ }
+
+ public boolean isInitialized(LikeFilter message)
+ {
+ return true;
+ }
+
+ // Deserializes fields by number until field 0 (end of message).
+ public void mergeFrom(Input input, LikeFilter message) throws IOException
+ {
+ for(int number = input.readFieldNumber(this);; number = input.readFieldNumber(this))
+ {
+ switch(number)
+ {
+ case 0:
+ return;
+ case 1:
+ message.regex = input.readString();
+ break;
+ case 2:
+ message.escape = input.readString();
+ break;
+ default:
+ input.handleUnknownField(number, this);
+ }
+ }
+ }
+
+
+ // Serializes only set fields; both are optional.
+ public void writeTo(Output output, LikeFilter message) throws IOException
+ {
+ if(message.regex != null)
+ output.writeString(1, message.regex, false);
+
+ if(message.escape != null)
+ output.writeString(2, message.escape, false);
+ }
+
+ public String getFieldName(int number)
+ {
+ switch(number)
+ {
+ case 1: return "regex";
+ case 2: return "escape";
+ default: return null;
+ }
+ }
+
+ public int getFieldNumber(String name)
+ {
+ final Integer number = __fieldMap.get(name);
+ return number == null ? 0 : number.intValue();
+ }
+
+ private static final java.util.HashMap<String,Integer> __fieldMap = new java.util.HashMap<String,Integer>();
+ static
+ {
+ __fieldMap.put("regex", 1);
+ __fieldMap.put("escape", 2);
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/drill/blob/ef6e522c/protocol/src/main/java/org/apache/drill/exec/proto/beans/RequestStatus.java
----------------------------------------------------------------------
diff --git a/protocol/src/main/java/org/apache/drill/exec/proto/beans/RequestStatus.java b/protocol/src/main/java/org/apache/drill/exec/proto/beans/RequestStatus.java
new file mode 100644
index 0000000..f526ab5
--- /dev/null
+++ b/protocol/src/main/java/org/apache/drill/exec/proto/beans/RequestStatus.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Generated by http://code.google.com/p/protostuff/ ... DO NOT EDIT!
+// Generated from protobuf
+
+package org.apache.drill.exec.proto.beans;
+
+// Protostuff-generated mirror of the exec.user.RequestStatus protobuf enum:
+// the outcome of a metadata-fetch request (UNKNOWN/OK/FAILED). Wire numbers
+// must stay in sync with the .proto definition. Generated code — do not
+// hand-edit.
+public enum RequestStatus implements com.dyuproject.protostuff.EnumLite<RequestStatus>
+{
+ UNKNOWN(0),
+ OK(1),
+ FAILED(2);
+
+ // protobuf wire number of this enum constant
+ public final int number;
+
+ private RequestStatus (int number)
+ {
+ this.number = number;
+ }
+
+ public int getNumber()
+ {
+ return number;
+ }
+
+ // Maps a wire number back to the constant; returns null for unknown
+ // numbers (e.g. a value added by a newer peer).
+ public static RequestStatus valueOf(int number)
+ {
+ switch(number)
+ {
+ case 0: return UNKNOWN;
+ case 1: return OK;
+ case 2: return FAILED;
+ default: return null;
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/drill/blob/ef6e522c/protocol/src/main/java/org/apache/drill/exec/proto/beans/RpcType.java
----------------------------------------------------------------------
diff --git a/protocol/src/main/java/org/apache/drill/exec/proto/beans/RpcType.java b/protocol/src/main/java/org/apache/drill/exec/proto/beans/RpcType.java
index 5800183..ff4f4bc 100644
--- a/protocol/src/main/java/org/apache/drill/exec/proto/beans/RpcType.java
+++ b/protocol/src/main/java/org/apache/drill/exec/proto/beans/RpcType.java
@@ -30,9 +30,17 @@ public enum RpcType implements com.dyuproject.protostuff.EnumLite<RpcType>
REQUEST_RESULTS(5),
RESUME_PAUSED_QUERY(11),
GET_QUERY_PLAN_FRAGMENTS(12),
+ GET_CATALOGS(14),
+ GET_SCHEMAS(15),
+ GET_TABLES(16),
+ GET_COLUMNS(17),
QUERY_DATA(6),
QUERY_HANDLE(7),
QUERY_PLAN_FRAGMENTS(13),
+ CATALOGS(18),
+ SCHEMAS(19),
+ TABLES(20),
+ COLUMNS(21),
REQ_META_FUNCTIONS(8),
RESP_FUNCTION_LIST(9),
QUERY_RESULT(10);
@@ -67,6 +75,14 @@ public enum RpcType implements com.dyuproject.protostuff.EnumLite<RpcType>
case 11: return RESUME_PAUSED_QUERY;
case 12: return GET_QUERY_PLAN_FRAGMENTS;
case 13: return QUERY_PLAN_FRAGMENTS;
+ case 14: return GET_CATALOGS;
+ case 15: return GET_SCHEMAS;
+ case 16: return GET_TABLES;
+ case 17: return GET_COLUMNS;
+ case 18: return CATALOGS;
+ case 19: return SCHEMAS;
+ case 20: return TABLES;
+ case 21: return COLUMNS;
default: return null;
}
}
http://git-wip-us.apache.org/repos/asf/drill/blob/ef6e522c/protocol/src/main/java/org/apache/drill/exec/proto/beans/SchemaMetadata.java
----------------------------------------------------------------------
diff --git a/protocol/src/main/java/org/apache/drill/exec/proto/beans/SchemaMetadata.java b/protocol/src/main/java/org/apache/drill/exec/proto/beans/SchemaMetadata.java
new file mode 100644
index 0000000..29f6e67
--- /dev/null
+++ b/protocol/src/main/java/org/apache/drill/exec/proto/beans/SchemaMetadata.java
@@ -0,0 +1,251 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Generated by http://code.google.com/p/protostuff/ ... DO NOT EDIT!
+// Generated from protobuf
+
+package org.apache.drill.exec.proto.beans;
+
+import java.io.Externalizable;
+import java.io.IOException;
+import java.io.ObjectInput;
+import java.io.ObjectOutput;
+
+import com.dyuproject.protostuff.GraphIOUtil;
+import com.dyuproject.protostuff.Input;
+import com.dyuproject.protostuff.Message;
+import com.dyuproject.protostuff.Output;
+import com.dyuproject.protostuff.Schema;
+
+// Protostuff-generated bean describing one schema in a GET_SCHEMAS response:
+// catalog/schema names, owner, schema type, and mutability. All fields are
+// plain strings; 'mutable' is a string flag whose exact values are defined
+// by the server (NOTE(review): presumably "YES"/"NO" style — confirm against
+// the producer). Generated code — do not hand-edit; regenerate from .proto.
+public final class SchemaMetadata implements Externalizable, Message<SchemaMetadata>, Schema<SchemaMetadata>
+{
+
+ public static Schema<SchemaMetadata> getSchema()
+ {
+ return DEFAULT_INSTANCE;
+ }
+
+ public static SchemaMetadata getDefaultInstance()
+ {
+ return DEFAULT_INSTANCE;
+ }
+
+ static final SchemaMetadata DEFAULT_INSTANCE = new SchemaMetadata();
+
+
+ private String catalogName;
+ private String schemaName;
+ private String owner;
+ private String type;
+ private String mutable;
+
+ public SchemaMetadata()
+ {
+
+ }
+
+ // getters and setters
+
+ // catalogName
+
+ public String getCatalogName()
+ {
+ return catalogName;
+ }
+
+ public SchemaMetadata setCatalogName(String catalogName)
+ {
+ this.catalogName = catalogName;
+ return this;
+ }
+
+ // schemaName
+
+ public String getSchemaName()
+ {
+ return schemaName;
+ }
+
+ public SchemaMetadata setSchemaName(String schemaName)
+ {
+ this.schemaName = schemaName;
+ return this;
+ }
+
+ // owner
+
+ public String getOwner()
+ {
+ return owner;
+ }
+
+ public SchemaMetadata setOwner(String owner)
+ {
+ this.owner = owner;
+ return this;
+ }
+
+ // type
+
+ public String getType()
+ {
+ return type;
+ }
+
+ public SchemaMetadata setType(String type)
+ {
+ this.type = type;
+ return this;
+ }
+
+ // mutable
+
+ public String getMutable()
+ {
+ return mutable;
+ }
+
+ public SchemaMetadata setMutable(String mutable)
+ {
+ this.mutable = mutable;
+ return this;
+ }
+
+ // java serialization
+
+ public void readExternal(ObjectInput in) throws IOException
+ {
+ GraphIOUtil.mergeDelimitedFrom(in, this, this);
+ }
+
+ public void writeExternal(ObjectOutput out) throws IOException
+ {
+ GraphIOUtil.writeDelimitedTo(out, this, this);
+ }
+
+ // message method
+
+ public Schema<SchemaMetadata> cachedSchema()
+ {
+ return DEFAULT_INSTANCE;
+ }
+
+ // schema methods
+
+ public SchemaMetadata newMessage()
+ {
+ return new SchemaMetadata();
+ }
+
+ public Class<SchemaMetadata> typeClass()
+ {
+ return SchemaMetadata.class;
+ }
+
+ public String messageName()
+ {
+ return SchemaMetadata.class.getSimpleName();
+ }
+
+ public String messageFullName()
+ {
+ return SchemaMetadata.class.getName();
+ }
+
+ public boolean isInitialized(SchemaMetadata message)
+ {
+ return true;
+ }
+
+ // Deserializes fields by number until field 0 (end of message).
+ public void mergeFrom(Input input, SchemaMetadata message) throws IOException
+ {
+ for(int number = input.readFieldNumber(this);; number = input.readFieldNumber(this))
+ {
+ switch(number)
+ {
+ case 0:
+ return;
+ case 1:
+ message.catalogName = input.readString();
+ break;
+ case 2:
+ message.schemaName = input.readString();
+ break;
+ case 3:
+ message.owner = input.readString();
+ break;
+ case 4:
+ message.type = input.readString();
+ break;
+ case 5:
+ message.mutable = input.readString();
+ break;
+ default:
+ input.handleUnknownField(number, this);
+ }
+ }
+ }
+
+
+ // Serializes only set (non-null) fields; all five are optional strings.
+ public void writeTo(Output output, SchemaMetadata message) throws IOException
+ {
+ if(message.catalogName != null)
+ output.writeString(1, message.catalogName, false);
+
+ if(message.schemaName != null)
+ output.writeString(2, message.schemaName, false);
+
+ if(message.owner != null)
+ output.writeString(3, message.owner, false);
+
+ if(message.type != null)
+ output.writeString(4, message.type, false);
+
+ if(message.mutable != null)
+ output.writeString(5, message.mutable, false);
+ }
+
+ public String getFieldName(int number)
+ {
+ switch(number)
+ {
+ case 1: return "catalogName";
+ case 2: return "schemaName";
+ case 3: return "owner";
+ case 4: return "type";
+ case 5: return "mutable";
+ default: return null;
+ }
+ }
+
+ public int getFieldNumber(String name)
+ {
+ final Integer number = __fieldMap.get(name);
+ return number == null ? 0 : number.intValue();
+ }
+
+ private static final java.util.HashMap<String,Integer> __fieldMap = new java.util.HashMap<String,Integer>();
+ static
+ {
+ __fieldMap.put("catalogName", 1);
+ __fieldMap.put("schemaName", 2);
+ __fieldMap.put("owner", 3);
+ __fieldMap.put("type", 4);
+ __fieldMap.put("mutable", 5);
+ }
+
+}
[3/5] drill git commit: DRILL-4728: Add support for new metadata
fetch APIs
Posted by ve...@apache.org.
http://git-wip-us.apache.org/repos/asf/drill/blob/ef6e522c/protocol/src/main/java/org/apache/drill/exec/proto/UserProtos.java
----------------------------------------------------------------------
diff --git a/protocol/src/main/java/org/apache/drill/exec/proto/UserProtos.java b/protocol/src/main/java/org/apache/drill/exec/proto/UserProtos.java
index 60ddc77..1bd4641 100644
--- a/protocol/src/main/java/org/apache/drill/exec/proto/UserProtos.java
+++ b/protocol/src/main/java/org/apache/drill/exec/proto/UserProtos.java
@@ -83,33 +83,97 @@ public final class UserProtos {
*/
GET_QUERY_PLAN_FRAGMENTS(7, 12),
/**
+ * <code>GET_CATALOGS = 14;</code>
+ *
+ * <pre>
+ * user is requesting metadata of catalog(s).
+ * </pre>
+ */
+ GET_CATALOGS(8, 14),
+ /**
+ * <code>GET_SCHEMAS = 15;</code>
+ *
+ * <pre>
+ * user is requesting metadata of schema(s)
+ * </pre>
+ */
+ GET_SCHEMAS(9, 15),
+ /**
+ * <code>GET_TABLES = 16;</code>
+ *
+ * <pre>
+ * user is requesting metadata of table(s)
+ * </pre>
+ */
+ GET_TABLES(10, 16),
+ /**
+ * <code>GET_COLUMNS = 17;</code>
+ *
+ * <pre>
+ * user is requesting metadata of column(s)
+ * </pre>
+ */
+ GET_COLUMNS(11, 17),
+ /**
* <code>QUERY_DATA = 6;</code>
*
* <pre>
* bit to user
* </pre>
*/
- QUERY_DATA(8, 6),
+ QUERY_DATA(12, 6),
/**
* <code>QUERY_HANDLE = 7;</code>
*/
- QUERY_HANDLE(9, 7),
+ QUERY_HANDLE(13, 7),
/**
* <code>QUERY_PLAN_FRAGMENTS = 13;</code>
*
* <pre>
- * return plan fragments
+ * return plan fragments
+ * </pre>
+ */
+ QUERY_PLAN_FRAGMENTS(14, 13),
+ /**
+ * <code>CATALOGS = 18;</code>
+ *
+ * <pre>
+ * return catalogs metadata in response to GET_CATALOGS
+ * </pre>
+ */
+ CATALOGS(15, 18),
+ /**
+ * <code>SCHEMAS = 19;</code>
+ *
+ * <pre>
+ * return schema metadata in response to GET_SCHEMAS
+ * </pre>
+ */
+ SCHEMAS(16, 19),
+ /**
+ * <code>TABLES = 20;</code>
+ *
+ * <pre>
+ * return table metadata in response to GET_TABLES
+ * </pre>
+ */
+ TABLES(17, 20),
+ /**
+ * <code>COLUMNS = 21;</code>
+ *
+ * <pre>
+ * return column metadata in response to GET_COLUMNS
* </pre>
*/
- QUERY_PLAN_FRAGMENTS(10, 13),
+ COLUMNS(18, 21),
/**
* <code>REQ_META_FUNCTIONS = 8;</code>
*/
- REQ_META_FUNCTIONS(11, 8),
+ REQ_META_FUNCTIONS(19, 8),
/**
* <code>RESP_FUNCTION_LIST = 9;</code>
*/
- RESP_FUNCTION_LIST(12, 9),
+ RESP_FUNCTION_LIST(20, 9),
/**
* <code>QUERY_RESULT = 10;</code>
*
@@ -117,7 +181,7 @@ public final class UserProtos {
* drillbit is reporting a query status change, most likely a terminal message, to the user
* </pre>
*/
- QUERY_RESULT(13, 10),
+ QUERY_RESULT(21, 10),
;
/**
@@ -169,6 +233,38 @@ public final class UserProtos {
*/
public static final int GET_QUERY_PLAN_FRAGMENTS_VALUE = 12;
/**
+ * <code>GET_CATALOGS = 14;</code>
+ *
+ * <pre>
+ * user is requesting metadata of catalog(s).
+ * </pre>
+ */
+ public static final int GET_CATALOGS_VALUE = 14;
+ /**
+ * <code>GET_SCHEMAS = 15;</code>
+ *
+ * <pre>
+ * user is requesting metadata of schema(s)
+ * </pre>
+ */
+ public static final int GET_SCHEMAS_VALUE = 15;
+ /**
+ * <code>GET_TABLES = 16;</code>
+ *
+ * <pre>
+ * user is requesting metadata of table(s)
+ * </pre>
+ */
+ public static final int GET_TABLES_VALUE = 16;
+ /**
+ * <code>GET_COLUMNS = 17;</code>
+ *
+ * <pre>
+ * user is requesting metadata of column(s)
+ * </pre>
+ */
+ public static final int GET_COLUMNS_VALUE = 17;
+ /**
* <code>QUERY_DATA = 6;</code>
*
* <pre>
@@ -184,11 +280,43 @@ public final class UserProtos {
* <code>QUERY_PLAN_FRAGMENTS = 13;</code>
*
* <pre>
- * return plan fragments
+ * return plan fragments
* </pre>
*/
public static final int QUERY_PLAN_FRAGMENTS_VALUE = 13;
/**
+ * <code>CATALOGS = 18;</code>
+ *
+ * <pre>
+ * return catalogs metadata in response to GET_CATALOGS
+ * </pre>
+ */
+ public static final int CATALOGS_VALUE = 18;
+ /**
+ * <code>SCHEMAS = 19;</code>
+ *
+ * <pre>
+ * return schema metadata in response to GET_SCHEMAS
+ * </pre>
+ */
+ public static final int SCHEMAS_VALUE = 19;
+ /**
+ * <code>TABLES = 20;</code>
+ *
+ * <pre>
+ * return table metadata in response to GET_TABLES
+ * </pre>
+ */
+ public static final int TABLES_VALUE = 20;
+ /**
+ * <code>COLUMNS = 21;</code>
+ *
+ * <pre>
+ * return column metadata in response to GET_COLUMNS
+ * </pre>
+ */
+ public static final int COLUMNS_VALUE = 21;
+ /**
* <code>REQ_META_FUNCTIONS = 8;</code>
*/
public static final int REQ_META_FUNCTIONS_VALUE = 8;
@@ -218,9 +346,17 @@ public final class UserProtos {
case 5: return REQUEST_RESULTS;
case 11: return RESUME_PAUSED_QUERY;
case 12: return GET_QUERY_PLAN_FRAGMENTS;
+ case 14: return GET_CATALOGS;
+ case 15: return GET_SCHEMAS;
+ case 16: return GET_TABLES;
+ case 17: return GET_COLUMNS;
case 6: return QUERY_DATA;
case 7: return QUERY_HANDLE;
case 13: return QUERY_PLAN_FRAGMENTS;
+ case 18: return CATALOGS;
+ case 19: return SCHEMAS;
+ case 20: return TABLES;
+ case 21: return COLUMNS;
case 8: return REQ_META_FUNCTIONS;
case 9: return RESP_FUNCTION_LIST;
case 10: return QUERY_RESULT;
@@ -488,6 +624,102 @@ public final class UserProtos {
// @@protoc_insertion_point(enum_scope:exec.user.HandshakeStatus)
}
+ /**
+ * Protobuf enum {@code exec.user.RequestStatus}
+ *
+ * <pre>
+ *
+ * Enum indicating the request status.
+ * </pre>
+ */
+ // protoc-generated counterpart of the protostuff RequestStatus bean; each
+ // constant carries (descriptor index, wire value). Do not hand-edit.
+ public enum RequestStatus
+ implements com.google.protobuf.ProtocolMessageEnum {
+ /**
+ * <code>UNKNOWN = 0;</code>
+ */
+ UNKNOWN(0, 0),
+ /**
+ * <code>OK = 1;</code>
+ */
+ OK(1, 1),
+ /**
+ * <code>FAILED = 2;</code>
+ */
+ FAILED(2, 2),
+ ;
+
+ /**
+ * <code>UNKNOWN = 0;</code>
+ */
+ public static final int UNKNOWN_VALUE = 0;
+ /**
+ * <code>OK = 1;</code>
+ */
+ public static final int OK_VALUE = 1;
+ /**
+ * <code>FAILED = 2;</code>
+ */
+ public static final int FAILED_VALUE = 2;
+
+
+ public final int getNumber() { return value; }
+
+ // Wire number -> constant; null for numbers not known to this build.
+ public static RequestStatus valueOf(int value) {
+ switch (value) {
+ case 0: return UNKNOWN;
+ case 1: return OK;
+ case 2: return FAILED;
+ default: return null;
+ }
+ }
+
+ public static com.google.protobuf.Internal.EnumLiteMap<RequestStatus>
+ internalGetValueMap() {
+ return internalValueMap;
+ }
+ private static com.google.protobuf.Internal.EnumLiteMap<RequestStatus>
+ internalValueMap =
+ new com.google.protobuf.Internal.EnumLiteMap<RequestStatus>() {
+ public RequestStatus findValueByNumber(int number) {
+ return RequestStatus.valueOf(number);
+ }
+ };
+
+ public final com.google.protobuf.Descriptors.EnumValueDescriptor
+ getValueDescriptor() {
+ return getDescriptor().getValues().get(index);
+ }
+ public final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+ public static final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptor() {
+ // index 3 = fourth enum declared in UserProtos' descriptor; shifts if
+ // enums are added to or reordered in the .proto (regeneration keeps it
+ // consistent).
+ return org.apache.drill.exec.proto.UserProtos.getDescriptor().getEnumTypes().get(3);
+ }
+
+ private static final RequestStatus[] VALUES = values();
+
+ // Descriptor-based lookup (uses declaration index, not wire value).
+ public static RequestStatus valueOf(
+ com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
+ if (desc.getType() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "EnumValueDescriptor is not for this type.");
+ }
+ return VALUES[desc.getIndex()];
+ }
+
+ private final int index;
+ private final int value;
+
+ private RequestStatus(int index, int value) {
+ this.index = index;
+ this.value = value;
+ }
+
+ // @@protoc_insertion_point(enum_scope:exec.user.RequestStatus)
+ }
+
public interface PropertyOrBuilder
extends com.google.protobuf.MessageOrBuilder {
@@ -7150,97 +7382,13132 @@ public final class UserProtos {
// @@protoc_insertion_point(class_scope:exec.user.BitToUserHandshake)
}
- private static com.google.protobuf.Descriptors.Descriptor
- internal_static_exec_user_Property_descriptor;
- private static
- com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internal_static_exec_user_Property_fieldAccessorTable;
- private static com.google.protobuf.Descriptors.Descriptor
- internal_static_exec_user_UserProperties_descriptor;
- private static
- com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internal_static_exec_user_UserProperties_fieldAccessorTable;
- private static com.google.protobuf.Descriptors.Descriptor
- internal_static_exec_user_UserToBitHandshake_descriptor;
- private static
- com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internal_static_exec_user_UserToBitHandshake_fieldAccessorTable;
- private static com.google.protobuf.Descriptors.Descriptor
- internal_static_exec_user_RequestResults_descriptor;
- private static
- com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internal_static_exec_user_RequestResults_fieldAccessorTable;
- private static com.google.protobuf.Descriptors.Descriptor
- internal_static_exec_user_RunQuery_descriptor;
- private static
- com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internal_static_exec_user_RunQuery_fieldAccessorTable;
- private static com.google.protobuf.Descriptors.Descriptor
- internal_static_exec_user_GetQueryPlanFragments_descriptor;
- private static
- com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internal_static_exec_user_GetQueryPlanFragments_fieldAccessorTable;
- private static com.google.protobuf.Descriptors.Descriptor
- internal_static_exec_user_QueryPlanFragments_descriptor;
- private static
- com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internal_static_exec_user_QueryPlanFragments_fieldAccessorTable;
- private static com.google.protobuf.Descriptors.Descriptor
- internal_static_exec_user_BitToUserHandshake_descriptor;
- private static
- com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internal_static_exec_user_BitToUserHandshake_fieldAccessorTable;
+ /** Read-only accessors shared by {@code LikeFilter} and its {@code Builder} (protoc-generated). */
+ public interface LikeFilterOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
- public static com.google.protobuf.Descriptors.FileDescriptor
- getDescriptor() {
- return descriptor;
+ // optional string regex = 1;
+ /**
+ * <code>optional string regex = 1;</code>
+ *
+ * <pre>
+ * pattern to match
+ * </pre>
+ */
+ boolean hasRegex();
+ /**
+ * <code>optional string regex = 1;</code>
+ *
+ * <pre>
+ * pattern to match
+ * </pre>
+ */
+ java.lang.String getRegex();
+ /**
+ * <code>optional string regex = 1;</code>
+ *
+ * <pre>
+ * pattern to match
+ * </pre>
+ */
+ com.google.protobuf.ByteString
+ getRegexBytes();
+
+ // optional string escape = 2;
+ /**
+ * <code>optional string escape = 2;</code>
+ *
+ * <pre>
+ * escape character (if any) present in the pattern
+ * </pre>
+ */
+ boolean hasEscape();
+ /**
+ * <code>optional string escape = 2;</code>
+ *
+ * <pre>
+ * escape character (if any) present in the pattern
+ * </pre>
+ */
+ java.lang.String getEscape();
+ /**
+ * <code>optional string escape = 2;</code>
+ *
+ * <pre>
+ * escape character (if any) present in the pattern
+ * </pre>
+ */
+ com.google.protobuf.ByteString
+ getEscapeBytes();
}
- private static com.google.protobuf.Descriptors.FileDescriptor
- descriptor;
- static {
- java.lang.String[] descriptorData = {
- "\n\nUser.proto\022\texec.user\032\017SchemaDef.proto" +
- "\032\023UserBitShared.proto\032\rBitData.proto\032\020Bi" +
- "tControl.proto\032\025ExecutionProtos.proto\"&\n" +
- "\010Property\022\013\n\003key\030\001 \002(\t\022\r\n\005value\030\002 \002(\t\"9\n" +
- "\016UserProperties\022\'\n\nproperties\030\001 \003(\0132\023.ex" +
- "ec.user.Property\"\234\002\n\022UserToBitHandshake\022" +
- ".\n\007channel\030\001 \001(\0162\027.exec.shared.RpcChanne" +
- "l:\004USER\022\031\n\021support_listening\030\002 \001(\010\022\023\n\013rp" +
- "c_version\030\003 \001(\005\0221\n\013credentials\030\004 \001(\0132\034.e" +
- "xec.shared.UserCredentials\022-\n\nproperties",
- "\030\005 \001(\0132\031.exec.user.UserProperties\022$\n\025sup" +
- "port_complex_types\030\006 \001(\010:\005false\022\036\n\017suppo" +
- "rt_timeout\030\007 \001(\010:\005false\"S\n\016RequestResult" +
- "s\022&\n\010query_id\030\001 \001(\0132\024.exec.shared.QueryI" +
- "d\022\031\n\021maximum_responses\030\002 \001(\005\"\244\001\n\010RunQuer" +
- "y\0221\n\014results_mode\030\001 \001(\0162\033.exec.user.Quer" +
- "yResultsMode\022$\n\004type\030\002 \001(\0162\026.exec.shared" +
- ".QueryType\022\014\n\004plan\030\003 \001(\t\0221\n\tfragments\030\004 " +
- "\003(\0132\036.exec.bit.control.PlanFragment\"g\n\025G" +
- "etQueryPlanFragments\022\r\n\005query\030\001 \002(\t\022$\n\004t",
- "ype\030\002 \001(\0162\026.exec.shared.QueryType\022\031\n\nspl" +
- "it_plan\030\003 \001(\010:\005false\"\316\001\n\022QueryPlanFragme" +
- "nts\0223\n\006status\030\001 \002(\0162#.exec.shared.QueryR" +
- "esult.QueryState\022&\n\010query_id\030\002 \001(\0132\024.exe" +
- "c.shared.QueryId\0221\n\tfragments\030\003 \003(\0132\036.ex" +
- "ec.bit.control.PlanFragment\022(\n\005error\030\004 \001" +
- "(\0132\031.exec.shared.DrillPBError\"|\n\022BitToUs" +
- "erHandshake\022\023\n\013rpc_version\030\002 \001(\005\022*\n\006stat" +
+ /**
+ * Protobuf type {@code exec.user.LikeFilter}
+ *
+ * <pre>
+ *
+ * Simple filter which encapsulates the SQL LIKE ... ESCAPE function
+ * </pre>
+ */
+ // NOTE(review): protoc-generated message class (see the @@protoc_insertion_point
+ // markers below) — change User.proto and regenerate rather than hand-editing.
+ public static final class LikeFilter extends
+ com.google.protobuf.GeneratedMessage
+ implements LikeFilterOrBuilder {
+ // Use LikeFilter.newBuilder() to construct.
+ private LikeFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ // No-init constructor: used only by the static initializer to create the
+ // shared defaultInstance singleton (fields populated via initFields() there).
+ private LikeFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final LikeFilter defaultInstance;
+ public static LikeFilter getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public LikeFilter getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ // Wire-format parsing constructor: reads tag/value pairs until end of input
+ // (tag 0); unrecognized fields are preserved in unknownFields.
+ private LikeFilter(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ // default: intentionally precedes the field cases (legal in Java;
+ // each case breaks, so ordering does not affect dispatch).
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ regex_ = input.readBytes();
+ break;
+ }
+ case 18: {
+ bitField0_ |= 0x00000002;
+ escape_ = input.readBytes();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.drill.exec.proto.UserProtos.internal_static_exec_user_LikeFilter_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.drill.exec.proto.UserProtos.internal_static_exec_user_LikeFilter_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.drill.exec.proto.UserProtos.LikeFilter.class, org.apache.drill.exec.proto.UserProtos.LikeFilter.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<LikeFilter> PARSER =
+ new com.google.protobuf.AbstractParser<LikeFilter>() {
+ public LikeFilter parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new LikeFilter(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<LikeFilter> getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // optional string regex = 1;
+ public static final int REGEX_FIELD_NUMBER = 1;
+ private java.lang.Object regex_;
+ /**
+ * <code>optional string regex = 1;</code>
+ *
+ * <pre>
+ * pattern to match
+ * </pre>
+ */
+ public boolean hasRegex() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>optional string regex = 1;</code>
+ *
+ * <pre>
+ * pattern to match
+ * </pre>
+ */
+ // Lazily decodes the stored ByteString to String, caching the String back
+ // into regex_ only when it is valid UTF-8 (standard generated-code pattern).
+ public java.lang.String getRegex() {
+ java.lang.Object ref = regex_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ regex_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ * <code>optional string regex = 1;</code>
+ *
+ * <pre>
+ * pattern to match
+ * </pre>
+ */
+ public com.google.protobuf.ByteString
+ getRegexBytes() {
+ java.lang.Object ref = regex_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ regex_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ // optional string escape = 2;
+ public static final int ESCAPE_FIELD_NUMBER = 2;
+ private java.lang.Object escape_;
+ /**
+ * <code>optional string escape = 2;</code>
+ *
+ * <pre>
+ * escape character (if any) present in the pattern
+ * </pre>
+ */
+ public boolean hasEscape() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * <code>optional string escape = 2;</code>
+ *
+ * <pre>
+ * escape character (if any) present in the pattern
+ * </pre>
+ */
+ public java.lang.String getEscape() {
+ java.lang.Object ref = escape_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ escape_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ * <code>optional string escape = 2;</code>
+ *
+ * <pre>
+ * escape character (if any) present in the pattern
+ * </pre>
+ */
+ public com.google.protobuf.ByteString
+ getEscapeBytes() {
+ java.lang.Object ref = escape_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ escape_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ // Field defaults: both optional strings default to "".
+ private void initFields() {
+ regex_ = "";
+ escape_ = "";
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, getRegexBytes());
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeBytes(2, getEscapeBytes());
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, getRegexBytes());
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(2, getEscapeBytes());
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ public static org.apache.drill.exec.proto.UserProtos.LikeFilter parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.drill.exec.proto.UserProtos.LikeFilter parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.drill.exec.proto.UserProtos.LikeFilter parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.drill.exec.proto.UserProtos.LikeFilter parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.drill.exec.proto.UserProtos.LikeFilter parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.drill.exec.proto.UserProtos.LikeFilter parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.drill.exec.proto.UserProtos.LikeFilter parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.drill.exec.proto.UserProtos.LikeFilter parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.drill.exec.proto.UserProtos.LikeFilter parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.drill.exec.proto.UserProtos.LikeFilter parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.drill.exec.proto.UserProtos.LikeFilter prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code exec.user.LikeFilter}
+ *
+ * <pre>
+ *
+ * Simple filter which encapsulates the SQL LIKE ... ESCAPE function
+ * </pre>
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.drill.exec.proto.UserProtos.LikeFilterOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.drill.exec.proto.UserProtos.internal_static_exec_user_LikeFilter_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.drill.exec.proto.UserProtos.internal_static_exec_user_LikeFilter_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.drill.exec.proto.UserProtos.LikeFilter.class, org.apache.drill.exec.proto.UserProtos.LikeFilter.Builder.class);
+ }
+
+ // Construct using org.apache.drill.exec.proto.UserProtos.LikeFilter.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ regex_ = "";
+ bitField0_ = (bitField0_ & ~0x00000001);
+ escape_ = "";
+ bitField0_ = (bitField0_ & ~0x00000002);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.drill.exec.proto.UserProtos.internal_static_exec_user_LikeFilter_descriptor;
+ }
+
+ public org.apache.drill.exec.proto.UserProtos.LikeFilter getDefaultInstanceForType() {
+ return org.apache.drill.exec.proto.UserProtos.LikeFilter.getDefaultInstance();
+ }
+
+ public org.apache.drill.exec.proto.UserProtos.LikeFilter build() {
+ org.apache.drill.exec.proto.UserProtos.LikeFilter result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.drill.exec.proto.UserProtos.LikeFilter buildPartial() {
+ org.apache.drill.exec.proto.UserProtos.LikeFilter result = new org.apache.drill.exec.proto.UserProtos.LikeFilter(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.regex_ = regex_;
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.escape_ = escape_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.drill.exec.proto.UserProtos.LikeFilter) {
+ return mergeFrom((org.apache.drill.exec.proto.UserProtos.LikeFilter)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.drill.exec.proto.UserProtos.LikeFilter other) {
+ if (other == org.apache.drill.exec.proto.UserProtos.LikeFilter.getDefaultInstance()) return this;
+ if (other.hasRegex()) {
+ bitField0_ |= 0x00000001;
+ regex_ = other.regex_;
+ onChanged();
+ }
+ if (other.hasEscape()) {
+ bitField0_ |= 0x00000002;
+ escape_ = other.escape_;
+ onChanged();
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.drill.exec.proto.UserProtos.LikeFilter parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.drill.exec.proto.UserProtos.LikeFilter) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // optional string regex = 1;
+ private java.lang.Object regex_ = "";
+ /**
+ * <code>optional string regex = 1;</code>
+ *
+ * <pre>
+ * pattern to match
+ * </pre>
+ */
+ public boolean hasRegex() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>optional string regex = 1;</code>
+ *
+ * <pre>
+ * pattern to match
+ * </pre>
+ */
+ public java.lang.String getRegex() {
+ java.lang.Object ref = regex_;
+ if (!(ref instanceof java.lang.String)) {
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ regex_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * <code>optional string regex = 1;</code>
+ *
+ * <pre>
+ * pattern to match
+ * </pre>
+ */
+ public com.google.protobuf.ByteString
+ getRegexBytes() {
+ java.lang.Object ref = regex_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ regex_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * <code>optional string regex = 1;</code>
+ *
+ * <pre>
+ * pattern to match
+ * </pre>
+ */
+ public Builder setRegex(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ regex_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional string regex = 1;</code>
+ *
+ * <pre>
+ * pattern to match
+ * </pre>
+ */
+ public Builder clearRegex() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ regex_ = getDefaultInstance().getRegex();
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional string regex = 1;</code>
+ *
+ * <pre>
+ * pattern to match
+ * </pre>
+ */
+ public Builder setRegexBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ regex_ = value;
+ onChanged();
+ return this;
+ }
+
+ // optional string escape = 2;
+ private java.lang.Object escape_ = "";
+ /**
+ * <code>optional string escape = 2;</code>
+ *
+ * <pre>
+ * escape character (if any) present in the pattern
+ * </pre>
+ */
+ public boolean hasEscape() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * <code>optional string escape = 2;</code>
+ *
+ * <pre>
+ * escape character (if any) present in the pattern
+ * </pre>
+ */
+ public java.lang.String getEscape() {
+ java.lang.Object ref = escape_;
+ if (!(ref instanceof java.lang.String)) {
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ escape_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * <code>optional string escape = 2;</code>
+ *
+ * <pre>
+ * escape character (if any) present in the pattern
+ * </pre>
+ */
+ public com.google.protobuf.ByteString
+ getEscapeBytes() {
+ java.lang.Object ref = escape_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ escape_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * <code>optional string escape = 2;</code>
+ *
+ * <pre>
+ * escape character (if any) present in the pattern
+ * </pre>
+ */
+ public Builder setEscape(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000002;
+ escape_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional string escape = 2;</code>
+ *
+ * <pre>
+ * escape character (if any) present in the pattern
+ * </pre>
+ */
+ public Builder clearEscape() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ escape_ = getDefaultInstance().getEscape();
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional string escape = 2;</code>
+ *
+ * <pre>
+ * escape character (if any) present in the pattern
+ * </pre>
+ */
+ public Builder setEscapeBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000002;
+ escape_ = value;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:exec.user.LikeFilter)
+ }
+
+ static {
+ defaultInstance = new LikeFilter(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:exec.user.LikeFilter)
+ }
+
+ /** Read-only accessors shared by {@code GetCatalogsReq} and its {@code Builder} (protoc-generated). */
+ public interface GetCatalogsReqOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // optional .exec.user.LikeFilter catalog_name_filter = 1;
+ /**
+ * <code>optional .exec.user.LikeFilter catalog_name_filter = 1;</code>
+ */
+ boolean hasCatalogNameFilter();
+ /**
+ * <code>optional .exec.user.LikeFilter catalog_name_filter = 1;</code>
+ */
+ org.apache.drill.exec.proto.UserProtos.LikeFilter getCatalogNameFilter();
+ /**
+ * <code>optional .exec.user.LikeFilter catalog_name_filter = 1;</code>
+ */
+ org.apache.drill.exec.proto.UserProtos.LikeFilterOrBuilder getCatalogNameFilterOrBuilder();
+ }
+ /**
+ * Protobuf type {@code exec.user.GetCatalogsReq}
+ *
+ * <pre>
+ *
+ * Request message for getting the metadata for catalogs satisfying the given optional filter.
+ * </pre>
+ */
+ public static final class GetCatalogsReq extends
+ com.google.protobuf.GeneratedMessage
+ implements GetCatalogsReqOrBuilder {
+ // Use GetCatalogsReq.newBuilder() to construct.
+ private GetCatalogsReq(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private GetCatalogsReq(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final GetCatalogsReq defaultInstance;
+ public static GetCatalogsReq getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public GetCatalogsReq getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private GetCatalogsReq(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ org.apache.drill.exec.proto.UserProtos.LikeFilter.Builder subBuilder = null;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ subBuilder = catalogNameFilter_.toBuilder();
+ }
+ catalogNameFilter_ = input.readMessage(org.apache.drill.exec.proto.UserProtos.LikeFilter.PARSER, extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(catalogNameFilter_);
+ catalogNameFilter_ = subBuilder.buildPartial();
+ }
+ bitField0_ |= 0x00000001;
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.drill.exec.proto.UserProtos.internal_static_exec_user_GetCatalogsReq_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.drill.exec.proto.UserProtos.internal_static_exec_user_GetCatalogsReq_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.drill.exec.proto.UserProtos.GetCatalogsReq.class, org.apache.drill.exec.proto.UserProtos.GetCatalogsReq.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<GetCatalogsReq> PARSER =
+ new com.google.protobuf.AbstractParser<GetCatalogsReq>() {
+ public GetCatalogsReq parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new GetCatalogsReq(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<GetCatalogsReq> getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // optional .exec.user.LikeFilter catalog_name_filter = 1;
+ public static final int CATALOG_NAME_FILTER_FIELD_NUMBER = 1;
+ private org.apache.drill.exec.proto.UserProtos.LikeFilter catalogNameFilter_;
+ /**
+ * <code>optional .exec.user.LikeFilter catalog_name_filter = 1;</code>
+ */
+ public boolean hasCatalogNameFilter() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>optional .exec.user.LikeFilter catalog_name_filter = 1;</code>
+ */
+ public org.apache.drill.exec.proto.UserProtos.LikeFilter getCatalogNameFilter() {
+ return catalogNameFilter_;
+ }
+ /**
+ * <code>optional .exec.user.LikeFilter catalog_name_filter = 1;</code>
+ */
+ public org.apache.drill.exec.proto.UserProtos.LikeFilterOrBuilder getCatalogNameFilterOrBuilder() {
+ return catalogNameFilter_;
+ }
+
+ private void initFields() {
+ catalogNameFilter_ = org.apache.drill.exec.proto.UserProtos.LikeFilter.getDefaultInstance();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeMessage(1, catalogNameFilter_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, catalogNameFilter_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ public static org.apache.drill.exec.proto.UserProtos.GetCatalogsReq parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.drill.exec.proto.UserProtos.GetCatalogsReq parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.drill.exec.proto.UserProtos.GetCatalogsReq parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.drill.exec.proto.UserProtos.GetCatalogsReq parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.drill.exec.proto.UserProtos.GetCatalogsReq parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.drill.exec.proto.UserProtos.GetCatalogsReq parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.drill.exec.proto.UserProtos.GetCatalogsReq parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.drill.exec.proto.UserProtos.GetCatalogsReq parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.drill.exec.proto.UserProtos.GetCatalogsReq parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.drill.exec.proto.UserProtos.GetCatalogsReq parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.drill.exec.proto.UserProtos.GetCatalogsReq prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code exec.user.GetCatalogsReq}
+ *
+ * <pre>
+ *
+ * Request message for getting the metadata for catalogs satisfying the given optional filter.
+ * </pre>
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.drill.exec.proto.UserProtos.GetCatalogsReqOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.drill.exec.proto.UserProtos.internal_static_exec_user_GetCatalogsReq_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.drill.exec.proto.UserProtos.internal_static_exec_user_GetCatalogsReq_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.drill.exec.proto.UserProtos.GetCatalogsReq.class, org.apache.drill.exec.proto.UserProtos.GetCatalogsReq.Builder.class);
+ }
+
+ // Construct using org.apache.drill.exec.proto.UserProtos.GetCatalogsReq.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getCatalogNameFilterFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ if (catalogNameFilterBuilder_ == null) {
+ catalogNameFilter_ = org.apache.drill.exec.proto.UserProtos.LikeFilter.getDefaultInstance();
+ } else {
+ catalogNameFilterBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.drill.exec.proto.UserProtos.internal_static_exec_user_GetCatalogsReq_descriptor;
+ }
+
+ public org.apache.drill.exec.proto.UserProtos.GetCatalogsReq getDefaultInstanceForType() {
+ return org.apache.drill.exec.proto.UserProtos.GetCatalogsReq.getDefaultInstance();
+ }
+
+ public org.apache.drill.exec.proto.UserProtos.GetCatalogsReq build() {
+ org.apache.drill.exec.proto.UserProtos.GetCatalogsReq result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.drill.exec.proto.UserProtos.GetCatalogsReq buildPartial() {
+ org.apache.drill.exec.proto.UserProtos.GetCatalogsReq result = new org.apache.drill.exec.proto.UserProtos.GetCatalogsReq(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ if (catalogNameFilterBuilder_ == null) {
+ result.catalogNameFilter_ = catalogNameFilter_;
+ } else {
+ result.catalogNameFilter_ = catalogNameFilterBuilder_.build();
+ }
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.drill.exec.proto.UserProtos.GetCatalogsReq) {
+ return mergeFrom((org.apache.drill.exec.proto.UserProtos.GetCatalogsReq)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.drill.exec.proto.UserProtos.GetCatalogsReq other) {
+ if (other == org.apache.drill.exec.proto.UserProtos.GetCatalogsReq.getDefaultInstance()) return this;
+ if (other.hasCatalogNameFilter()) {
+ mergeCatalogNameFilter(other.getCatalogNameFilter());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.drill.exec.proto.UserProtos.GetCatalogsReq parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.drill.exec.proto.UserProtos.GetCatalogsReq) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // optional .exec.user.LikeFilter catalog_name_filter = 1;
+ private org.apache.drill.exec.proto.UserProtos.LikeFilter catalogNameFilter_ = org.apache.drill.exec.proto.UserProtos.LikeFilter.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.drill.exec.proto.UserProtos.LikeFilter, org.apache.drill.exec.proto.UserProtos.LikeFilter.Builder, org.apache.drill.exec.proto.UserProtos.LikeFilterOrBuilder> catalogNameFilterBuilder_;
+ /**
+ * <code>optional .exec.user.LikeFilter catalog_name_filter = 1;</code>
+ */
+ public boolean hasCatalogNameFilter() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>optional .exec.user.LikeFilter catalog_name_filter = 1;</code>
+ */
+ public org.apache.drill.exec.proto.UserProtos.LikeFilter getCatalogNameFilter() {
+ if (catalogNameFilterBuilder_ == null) {
+ return catalogNameFilter_;
+ } else {
+ return catalogNameFilterBuilder_.getMessage();
+ }
+ }
+ /**
+ * <code>optional .exec.user.LikeFilter catalog_name_filter = 1;</code>
+ */
+ public Builder setCatalogNameFilter(org.apache.drill.exec.proto.UserProtos.LikeFilter value) {
+ if (catalogNameFilterBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ catalogNameFilter_ = value;
+ onChanged();
+ } else {
+ catalogNameFilterBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ /**
+ * <code>optional .exec.user.LikeFilter catalog_name_filter = 1;</code>
+ */
+ public Builder setCatalogNameFilter(
+ org.apache.drill.exec.proto.UserProtos.LikeFilter.Builder builderForValue) {
+ if (catalogNameFilterBuilder_ == null) {
+ catalogNameFilter_ = builderForValue.build();
+ onChanged();
+ } else {
+ catalogNameFilterBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ /**
+ * <code>optional .exec.user.LikeFilter catalog_name_filter = 1;</code>
+ */
+ public Builder mergeCatalogNameFilter(org.apache.drill.exec.proto.UserProtos.LikeFilter value) {
+ if (catalogNameFilterBuilder_ == null) {
+ if (((bitField0_ & 0x00000001) == 0x00000001) &&
+ catalogNameFilter_ != org.apache.drill.exec.proto.UserProtos.LikeFilter.getDefaultInstance()) {
+ catalogNameFilter_ =
+ org.apache.drill.exec.proto.UserProtos.LikeFilter.newBuilder(catalogNameFilter_).mergeFrom(value).buildPartial();
+ } else {
+ catalogNameFilter_ = value;
+ }
+ onChanged();
+ } else {
+ catalogNameFilterBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ /**
+ * <code>optional .exec.user.LikeFilter catalog_name_filter = 1;</code>
+ */
+ public Builder clearCatalogNameFilter() {
+ if (catalogNameFilterBuilder_ == null) {
+ catalogNameFilter_ = org.apache.drill.exec.proto.UserProtos.LikeFilter.getDefaultInstance();
+ onChanged();
+ } else {
+ catalogNameFilterBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+ /**
+ * <code>optional .exec.user.LikeFilter catalog_name_filter = 1;</code>
+ */
+ public org.apache.drill.exec.proto.UserProtos.LikeFilter.Builder getCatalogNameFilterBuilder() {
+ bitField0_ |= 0x00000001;
+ onChanged();
+ return getCatalogNameFilterFieldBuilder().getBuilder();
+ }
+ /**
+ * <code>optional .exec.user.LikeFilter catalog_name_filter = 1;</code>
+ */
+ public org.apache.drill.exec.proto.UserProtos.LikeFilterOrBuilder getCatalogNameFilterOrBuilder() {
+ if (catalogNameFilterBuilder_ != null) {
+ return catalogNameFilterBuilder_.getMessageOrBuilder();
+ } else {
+ return catalogNameFilter_;
+ }
+ }
+ /**
+ * <code>optional .exec.user.LikeFilter catalog_name_filter = 1;</code>
+ */
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.drill.exec.proto.UserProtos.LikeFilter, org.apache.drill.exec.proto.UserProtos.LikeFilter.Builder, org.apache.drill.exec.proto.UserProtos.LikeFilterOrBuilder>
+ getCatalogNameFilterFieldBuilder() {
+ if (catalogNameFilterBuilder_ == null) {
+ catalogNameFilterBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.drill.exec.proto.UserProtos.LikeFilter, org.apache.drill.exec.proto.UserProtos.LikeFilter.Builder, org.apache.drill.exec.proto.UserProtos.LikeFilterOrBuilder>(
+ catalogNameFilter_,
+ getParentForChildren(),
+ isClean());
+ catalogNameFilter_ = null;
+ }
+ return catalogNameFilterBuilder_;
+ }
+
+ // @@protoc_insertion_point(builder_scope:exec.user.GetCatalogsReq)
+ }
+
+ static {
+ defaultInstance = new GetCatalogsReq(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:exec.user.GetCatalogsReq)
+ }
+
+ public interface CatalogMetadataOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // optional string catalog_name = 1;
+ /**
+ * <code>optional string catalog_name = 1;</code>
+ */
+ boolean hasCatalogName();
+ /**
+ * <code>optional string catalog_name = 1;</code>
+ */
+ java.lang.String getCatalogName();
+ /**
+ * <code>optional string catalog_name = 1;</code>
+ */
+ com.google.protobuf.ByteString
+ getCatalogNameBytes();
+
+ // optional string description = 2;
+ /**
+ * <code>optional string description = 2;</code>
+ */
+ boolean hasDescription();
+ /**
+ * <code>optional string description = 2;</code>
+ */
+ java.lang.String getDescription();
+ /**
+ * <code>optional string description = 2;</code>
+ */
+ com.google.protobuf.ByteString
+ getDescriptionBytes();
+
+ // optional string connect = 3;
+ /**
+ * <code>optional string connect = 3;</code>
+ */
+ boolean hasConnect();
+ /**
+ * <code>optional string connect = 3;</code>
+ */
+ java.lang.String getConnect();
+ /**
+ * <code>optional string connect = 3;</code>
+ */
+ com.google.protobuf.ByteString
+ getConnectBytes();
+ }
+ /**
+ * Protobuf type {@code exec.user.CatalogMetadata}
+ *
+ * <pre>
+ *
+ * Message encapsulating metadata for a Catalog.
+ * </pre>
+ */
+ public static final class CatalogMetadata extends
+ com.google.protobuf.GeneratedMessage
+ implements CatalogMetadataOrBuilder {
+ // Use CatalogMetadata.newBuilder() to construct.
+ private CatalogMetadata(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private CatalogMetadata(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final CatalogMetadata defaultInstance;
+ public static CatalogMetadata getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public CatalogMetadata getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private CatalogMetadata(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ catalogName_ = input.readBytes();
+ break;
+ }
+ case 18: {
+ bitField0_ |= 0x00000002;
+ description_ = input.readBytes();
+ break;
+ }
+ case 26: {
+ bitField0_ |= 0x00000004;
+ connect_ = input.readBytes();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.drill.exec.proto.UserProtos.internal_static_exec_user_CatalogMetadata_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.drill.exec.proto.UserProtos.internal_static_exec_user_CatalogMetadata_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.drill.exec.proto.UserProtos.CatalogMetadata.class, org.apache.drill.exec.proto.UserProtos.CatalogMetadata.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<CatalogMetadata> PARSER =
+ new com.google.protobuf.AbstractParser<CatalogMetadata>() {
+ public CatalogMetadata parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new CatalogMetadata(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<CatalogMetadata> getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // optional string catalog_name = 1;
+ public static final int CATALOG_NAME_FIELD_NUMBER = 1;
+ private java.lang.Object catalogName_;
+ /**
+ * <code>optional string catalog_name = 1;</code>
+ */
+ public boolean hasCatalogName() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>optional string catalog_name = 1;</code>
+ */
+ public java.lang.String getCatalogName() {
+ java.lang.Object ref = catalogName_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ catalogName_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ * <code>optional string catalog_name = 1;</code>
+ */
+ public com.google.protobuf.ByteString
+ getCatalogNameBytes() {
+ java.lang.Object ref = catalogName_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ catalogName_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ // optional string description = 2;
+ public static final int DESCRIPTION_FIELD_NUMBER = 2;
+ private java.lang.Object description_;
+ /**
+ * <code>optional string description = 2;</code>
+ */
+ public boolean hasDescription() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * <code>optional string description = 2;</code>
+ */
+ public java.lang.String getDescription() {
+ java.lang.Object ref = description_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ description_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ * <code>optional string description = 2;</code>
+ */
+ public com.google.protobuf.ByteString
+ getDescriptionBytes() {
+ java.lang.Object ref = description_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ description_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ // optional string connect = 3;
+ public static final int CONNECT_FIELD_NUMBER = 3;
+ private java.lang.Object connect_;
+ /**
+ * <code>optional string connect = 3;</code>
+ */
+ public boolean hasConnect() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * <code>optional string connect = 3;</code>
+ */
+ public java.lang.String getConnect() {
+ java.lang.Object ref = connect_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ connect_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ * <code>optional string connect = 3;</code>
+ */
+ public com.google.protobuf.ByteString
+ getConnectBytes() {
+ java.lang.Object ref = connect_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ connect_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ private void initFields() {
+ catalogName_ = "";
+ description_ = "";
+ connect_ = "";
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, getCatalogNameBytes());
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeBytes(2, getDescriptionBytes());
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ output.writeBytes(3, getConnectBytes());
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, getCatalogNameBytes());
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(2, getDescriptionBytes());
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(3, getConnectBytes());
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ public static org.apache.drill.exec.proto.UserProtos.CatalogMetadata parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.drill.exec.proto.UserProtos.CatalogMetadata parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.drill.exec.proto.UserProtos.CatalogMetadata parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.drill.exec.proto.UserProtos.CatalogMetadata parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.drill.exec.proto.UserProtos.CatalogMetadata parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.drill.exec.proto.UserProtos.CatalogMetadata parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.drill.exec.proto.UserProtos.CatalogMetadata parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.drill.exec.proto.UserProtos.CatalogMetadata parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.drill.exec.proto.UserProtos.CatalogMetadata parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.drill.exec.proto.UserProtos.CatalogMetadata parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.drill.exec.proto.UserProtos.CatalogMetadata prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code exec.user.CatalogMetadata}
+ *
+ * <pre>
+ *
+ * Message encapsulating metadata for a Catalog.
+ * </pre>
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.drill.exec.proto.UserProtos.CatalogMetadataOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.drill.exec.proto.UserProtos.internal_static_exec_user_CatalogMetadata_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.drill.exec.proto.UserProtos.internal_static_exec_user_CatalogMetadata_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.drill.exec.proto.UserProtos.CatalogMetadata.class, org.apache.drill.exec.proto.UserProtos.CatalogMetadata.Builder.class);
+ }
+
+ // Construct using org.apache.drill.exec.proto.UserProtos.CatalogMetadata.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ catalogName_ = "";
+ bitField0_ = (bitField0_ & ~0x00000001);
+ description_ = "";
+ bitField0_ = (bitField0_ & ~0x00000002);
+ connect_ = "";
+ bitField0_ = (bitField0_ & ~0x00000004);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.drill.exec.proto.UserProtos.internal_static_exec_user_CatalogMetadata_descriptor;
+ }
+
+ public org.apache.drill.exec.proto.UserProtos.CatalogMetadata getDefaultInstanceForType() {
+ return org.apache.drill.exec.proto.UserProtos.CatalogMetadata.getDefaultInstance();
+ }
+
+ public org.apache.drill.exec.proto.UserProtos.CatalogMetadata build() {
+ org.apache.drill.exec.proto.UserProtos.CatalogMetadata result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.drill.exec.proto.UserProtos.CatalogMetadata buildPartial() {
+ org.apache.drill.exec.proto.UserProtos.CatalogMetadata result = new org.apache.drill.exec.proto.UserProtos.CatalogMetadata(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.catalogName_ = catalogName_;
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.description_ = description_;
+ if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+ to_bitField0_ |= 0x00000004;
+ }
+ result.connect_ = connect_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.drill.exec.proto.UserProtos.CatalogMetadata) {
+ return mergeFrom((org.apache.drill.exec.proto.UserProtos.CatalogMetadata)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.drill.exec.proto.UserProtos.CatalogMetadata other) {
+ if (other == org.apache.drill.exec.proto.UserProtos.CatalogMetadata.getDefaultInstance()) return this;
+ if (other.hasCatalogName()) {
+ bitField0_ |= 0x00000001;
+ catalogName_ = other.catalogName_;
+ onChanged();
+ }
+ if (other.hasDescription()) {
+ bitField0_ |= 0x00000002;
+ description_ = other.description_;
+ onChanged();
+ }
+ if (other.hasConnect()) {
+ bitField0_ |= 0x00000004;
+ connect_ = other.connect_;
+ onChanged();
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.drill.exec.proto.UserProtos.CatalogMetadata parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.drill.exec.proto.UserProtos.CatalogMetadata) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // optional string catalog_name = 1;
+ private java.lang.Object catalogName_ = "";
+ /**
+ * <code>optional string catalog_name = 1;</code>
+ */
+ public boolean hasCatalogName() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>optional string catalog_name = 1;</code>
+ */
+ public java.lang.String getCatalogName() {
+ java.lang.Object ref = catalogName_;
+ if (!(ref instanceof java.lang.String)) {
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ catalogName_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * <code>optional string catalog_name = 1;</code>
+ */
+ public com.google.protobuf.ByteString
+ getCatalogNameBytes() {
+ java.lang.Object ref = catalogName_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ catalogName_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * <code>optional string catalog_name = 1;</code>
+ */
+ public Builder setCatalogName(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ catalogName_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional string catalog_name = 1;</code>
+ */
+ public Builder clearCatalogName() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ catalogName_ = getDefaultInstance().getCatalogName();
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional string catalog_name = 1;</code>
+ */
+ public Builder setCatalogNameBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ catalogName_ = value;
+ onChanged();
+ return this;
+ }
+
+ // optional string description = 2;
+ private java.lang.Object description_ = "";
+ /**
+ * <code>optional string description = 2;</code>
+ */
+ public boolean hasDescription() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * <code>optional string description = 2;</code>
+ */
+ public java.lang.String getDescription() {
+ java.lang.Object ref = description_;
+ if (!(ref instanceof java.lang.String)) {
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ description_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * <code>optional string description = 2;</code>
+ */
+ public com.google.protobuf.ByteString
+ getDescriptionBytes() {
+ java.lang.Object ref = description_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ description_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * <code>optional string description = 2;</code>
+ */
+ public Builder setDescription(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000002;
+ description_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional string description = 2;</code>
+ */
+ public Builder clearDescription() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ description_ = getDefaultInstance().getDescription();
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional string description = 2;</code>
+ */
+ public Builder setDescriptionBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000002;
+ description_ = value;
+ onChanged();
+ return this;
+ }
+
+ // optional string connect = 3;
+ private java.lang.Object connect_ = "";
+ /**
+ * <code>optional string connect = 3;</code>
+ */
+ public boolean hasConnect() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * <code>optional string connect = 3;</code>
+ */
+ public java.lang.String getConnect() {
+ java.lang.Object ref = connect_;
+ if (!(ref instanceof java.lang.String)) {
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ connect_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * <code>optional string connect = 3;</code>
+ */
+ public com.google.protobuf.ByteString
+ getConnectBytes() {
+ java.lang.Object ref = connect_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ connect_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * <code>optional string connect = 3;</code>
+ */
+ public Builder setConnect(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000004;
+ connect_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional string connect = 3;</code>
+ */
+ public Builder clearConnect() {
+ bitField0_ = (bitField0_ & ~0x00000004);
+ connect_ = getDefaultInstance().getConnect();
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional string connect = 3;</code>
+ */
+ public Builder setConnectBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000004;
+ connect_ = value;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:exec.user.CatalogMetadata)
+ }
+
+ static {
+ defaultInstance = new CatalogMetadata(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:exec.user.CatalogMetadata)
+ }
+
+ public interface GetCatalogsRespOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // optional .exec.user.RequestStatus status = 1;
+ /**
+ * <code>optional .exec.user.RequestStatus status = 1;</code>
+ */
+ boolean hasStatus();
+ /**
+ * <code>optional .exec.user.RequestStatus status = 1;</code>
+ */
+ org.apache.drill.exec.proto.UserProtos.RequestStatus getStatus();
+
+ // repeated .exec.user.CatalogMetadata catalogs = 2;
+ /**
+ * <code>repeated .exec.user.CatalogMetadata catalogs = 2;</code>
+ */
+ java.util.List<org.apache.drill.exec.proto.UserProtos.CatalogMetadata>
+ getCatalogsList();
+ /**
+ * <code>repeated .exec.user.CatalogMetadata catalogs = 2;</code>
+ */
+ org.apache.drill.exec.proto.UserProtos.CatalogMetadata getCatalogs(int index);
+ /**
+ * <code>repeated .exec.user.CatalogMetadata catalogs = 2;</code>
+ */
+ int getCatalogsCount();
+ /**
+ * <code>repeated .exec.user.CatalogMetadata catalogs = 2;</code>
+ */
+ java.util.List<? extends org.apache.drill.exec.proto.UserProtos.CatalogMetadataOrBuilder>
+ getCatalogsOrBuilderList();
+ /**
+ * <code>repeated .exec.user.CatalogMetadata catalogs = 2;</code>
+ */
+ org.apache.drill.exec.proto.UserProtos.CatalogMetadataOrBuilder getCatalogsOrBuilder(
+ int index);
+
+ // optional .exec.shared.DrillPBError error = 3;
+ /**
+ * <code>optional .exec.shared.DrillPBError error = 3;</code>
+ */
+ boolean hasError();
+ /**
+ * <code>optional .exec.shared.DrillPBError error = 3;</code>
+ */
+ org.apache.drill.exec.proto.UserBitShared.DrillPBError getError();
+ /**
+ * <code>optional .exec.shared.DrillPBError error = 3;</code>
+ */
+ org.apache.drill.exec.proto.UserBitShared.DrillPBErrorOrBuilder getErrorOrBuilder();
+ }
+ /**
+ * Protobuf type {@code exec.user.GetCatalogsResp}
+ *
+ * <pre>
+ *
+ * Response message for GetCatalogsReq.
+ * </pre>
+ */
+ public static final class GetCatalogsResp extends
+ com.google.protobuf.GeneratedMessage
+ implements GetCatalogsRespOrBuilder {
+ // Use GetCatalogsResp.newBuilder() to construct.
+ private GetCatalogsResp(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private GetCatalogsResp(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final GetCatalogsResp defaultInstance;
+ public static GetCatalogsResp getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public GetCatalogsResp getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private GetCatalogsResp(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 8: {
+ int rawValue = input.readEnum();
+ org.apache.drill.exec.proto.UserProtos.RequestStatus value = org.apache.drill.exec.proto.UserProtos.Requ
<TRUNCATED>
[5/5] drill git commit: DRILL-4728: Add support for new metadata
fetch APIs
Posted by ve...@apache.org.
DRILL-4728: Add support for new metadata fetch APIs
+ Protobuf messages
- GetCatalogsReq -> GetCatalogsResp
- GetSchemasReq -> GetSchemasResp
- GetTablesReq -> GetTablesResp
- GetColumnsReq -> GetColumnsResp
+ Java Drill client changes
+ Server side changes to handle the metadata API calls
- Provide a self-contained `Runnable` implementation for each metadata API
that processes the requests and sends the response to the client
- In `UserWorker` override the `handle` method that takes the `ResponseSender` and
send the response from the `handle` method instead of returning it.
- Add a method for each new API to UserWorker to submit the metadata work.
- Add a method `addNewWork(Runnable runnable)` to `WorkerBee` to submit a generic
`Runnable` to `ExecutorService`.
- Move out couple of methods from `QueryContext` into a separate interface
`SchemaConfigInfoProvider` to enable instantiating Schema trees without the
full `QueryContext`
+ New protobuf messages increased the `jdbc-all.jar` size. Up the limit to 21MB.
this closes #527
Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/ef6e522c
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/ef6e522c
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/ef6e522c
Branch: refs/heads/master
Commit: ef6e522c9cba816110aa43ff6bccedf29a901236
Parents: 4bd67a6
Author: vkorukanti <ve...@dremio.com>
Authored: Thu Jun 9 16:03:06 2016 -0700
Committer: vkorukanti <ve...@dremio.com>
Committed: Sun Aug 7 12:47:00 2016 -0700
----------------------------------------------------------------------
.../drill/common/exceptions/ErrorHelper.java | 4 +-
.../apache/drill/exec/client/DrillClient.java | 101 +
.../org/apache/drill/exec/ops/QueryContext.java | 57 +-
.../apache/drill/exec/rpc/user/UserClient.java | 16 +-
.../drill/exec/rpc/user/UserRpcConfig.java | 12 +
.../apache/drill/exec/rpc/user/UserServer.java | 54 +-
.../apache/drill/exec/store/SchemaConfig.java | 50 +-
.../drill/exec/store/SchemaTreeProvider.java | 106 +
.../drill/exec/store/pojo/PojoRecordReader.java | 28 +-
.../org/apache/drill/exec/work/WorkManager.java | 8 +
.../exec/work/metadata/MetadataProvider.java | 486 +
.../apache/drill/exec/work/user/UserWorker.java | 25 +-
.../work/metadata/TestMetadataProvider.java | 308 +
exec/jdbc-all/pom.xml | 2 +-
.../drill/exec/proto/SchemaUserProtos.java | 1752 +++
.../org/apache/drill/exec/proto/UserProtos.java | 13537 ++++++++++++++++-
.../drill/exec/proto/beans/CatalogMetadata.java | 207 +
.../drill/exec/proto/beans/ColumnMetadata.java | 493 +
.../drill/exec/proto/beans/GetCatalogsReq.java | 165 +
.../drill/exec/proto/beans/GetCatalogsResp.java | 221 +
.../drill/exec/proto/beans/GetColumnsReq.java | 237 +
.../drill/exec/proto/beans/GetColumnsResp.java | 221 +
.../drill/exec/proto/beans/GetSchemasReq.java | 189 +
.../drill/exec/proto/beans/GetSchemasResp.java | 221 +
.../drill/exec/proto/beans/GetTablesReq.java | 213 +
.../drill/exec/proto/beans/GetTablesResp.java | 221 +
.../drill/exec/proto/beans/LikeFilter.java | 185 +
.../drill/exec/proto/beans/RequestStatus.java | 51 +
.../apache/drill/exec/proto/beans/RpcType.java | 16 +
.../drill/exec/proto/beans/SchemaMetadata.java | 251 +
.../drill/exec/proto/beans/TableMetadata.java | 229 +
protocol/src/main/protobuf/User.proto | 149 +-
32 files changed, 19635 insertions(+), 180 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/drill/blob/ef6e522c/common/src/main/java/org/apache/drill/common/exceptions/ErrorHelper.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/drill/common/exceptions/ErrorHelper.java b/common/src/main/java/org/apache/drill/common/exceptions/ErrorHelper.java
index 0aa5a1b..9b2097d 100644
--- a/common/src/main/java/org/apache/drill/common/exceptions/ErrorHelper.java
+++ b/common/src/main/java/org/apache/drill/common/exceptions/ErrorHelper.java
@@ -25,7 +25,7 @@ import java.util.regex.Pattern;
/**
* Utility class that handles error message generation from protobuf error objects.
*/
-class ErrorHelper {
+public class ErrorHelper {
private final static Pattern IGNORE= Pattern.compile("^(sun|com\\.sun|java).*");
@@ -96,7 +96,7 @@ class ErrorHelper {
return sb.toString();
}
- static ExceptionWrapper getWrapper(Throwable ex) {
+ public static ExceptionWrapper getWrapper(Throwable ex) {
return getWrapperBuilder(ex).build();
}
http://git-wip-us.apache.org/repos/asf/drill/blob/ef6e522c/exec/java-exec/src/main/java/org/apache/drill/exec/client/DrillClient.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/client/DrillClient.java b/exec/java-exec/src/main/java/org/apache/drill/exec/client/DrillClient.java
index 11abbcc..8063778 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/client/DrillClient.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/client/DrillClient.java
@@ -53,7 +53,16 @@ import org.apache.drill.exec.proto.UserBitShared.QueryId;
import org.apache.drill.exec.proto.UserBitShared.QueryResult.QueryState;
import org.apache.drill.exec.proto.UserBitShared.QueryType;
import org.apache.drill.exec.proto.UserProtos;
+import org.apache.drill.exec.proto.UserProtos.GetCatalogsResp;
+import org.apache.drill.exec.proto.UserProtos.GetCatalogsReq;
+import org.apache.drill.exec.proto.UserProtos.GetColumnsReq;
+import org.apache.drill.exec.proto.UserProtos.GetColumnsResp;
import org.apache.drill.exec.proto.UserProtos.GetQueryPlanFragments;
+import org.apache.drill.exec.proto.UserProtos.GetSchemasReq;
+import org.apache.drill.exec.proto.UserProtos.GetSchemasResp;
+import org.apache.drill.exec.proto.UserProtos.GetTablesReq;
+import org.apache.drill.exec.proto.UserProtos.GetTablesResp;
+import org.apache.drill.exec.proto.UserProtos.LikeFilter;
import org.apache.drill.exec.proto.UserProtos.Property;
import org.apache.drill.exec.proto.UserProtos.QueryPlanFragments;
import org.apache.drill.exec.proto.UserProtos.RpcType;
@@ -411,6 +420,98 @@ public class DrillClient implements Closeable, ConnectionThrottle {
}
/**
+ * Get the list of catalogs in <code>INFORMATION_SCHEMA.CATALOGS</code> table satisfying the given filters.
+ *
+ * @param catalogNameFilter Filter on <code>catalog name</code>. Pass null to apply no filter.
+ * @return
+ */
+ public DrillRpcFuture<GetCatalogsResp> getCatalogs(LikeFilter catalogNameFilter) {
+ final GetCatalogsReq.Builder reqBuilder = GetCatalogsReq.newBuilder();
+ if (catalogNameFilter != null) {
+ reqBuilder.setCatalogNameFilter(catalogNameFilter);
+ }
+
+ return client.send(RpcType.GET_CATALOGS, reqBuilder.build(), GetCatalogsResp.class);
+ }
+
+ /**
+ * Get the list of schemas in <code>INFORMATION_SCHEMA.SCHEMATA</code> table satisfying the given filters.
+ *
+ * @param catalogNameFilter Filter on <code>catalog name</code>. Pass null to apply no filter.
+ * @param schemaNameFilter Filter on <code>schema name</code>. Pass null to apply no filter.
+ * @return
+ */
+ public DrillRpcFuture<GetSchemasResp> getSchemas(LikeFilter catalogNameFilter, LikeFilter schemaNameFilter) {
+ final GetSchemasReq.Builder reqBuilder = GetSchemasReq.newBuilder();
+ if (catalogNameFilter != null) {
+ reqBuilder.setCatalogNameFilter(catalogNameFilter);
+ }
+
+ if (schemaNameFilter != null) {
+ reqBuilder.setSchameNameFilter(schemaNameFilter);
+ }
+
+ return client.send(RpcType.GET_SCHEMAS, reqBuilder.build(), GetSchemasResp.class);
+ }
+
+ /**
+ * Get the list of tables in <code>INFORMATION_SCHEMA.TABLES</code> table satisfying the given filters.
+ *
+ * @param catalogNameFilter Filter on <code>catalog name</code>. Pass null to apply no filter.
+ * @param schemaNameFilter Filter on <code>schema name</code>. Pass null to apply no filter.
+ * @param tableNameFilter Filter in <code>table name</code>. Pass null to apply no filter.
+ * @return
+ */
+ public DrillRpcFuture<GetTablesResp> getTables(LikeFilter catalogNameFilter, LikeFilter schemaNameFilter,
+ LikeFilter tableNameFilter) {
+ final GetTablesReq.Builder reqBuilder = GetTablesReq.newBuilder();
+ if (catalogNameFilter != null) {
+ reqBuilder.setCatalogNameFilter(catalogNameFilter);
+ }
+
+ if (schemaNameFilter != null) {
+ reqBuilder.setSchameNameFilter(schemaNameFilter);
+ }
+
+ if (tableNameFilter != null) {
+ reqBuilder.setTableNameFilter(tableNameFilter);
+ }
+
+ return client.send(RpcType.GET_TABLES, reqBuilder.build(), GetTablesResp.class);
+ }
+
+ /**
+ * Get the list of columns in <code>INFORMATION_SCHEMA.COLUMNS</code> table satisfying the given filters.
+ *
+ * @param catalogNameFilter Filter on <code>catalog name</code>. Pass null to apply no filter.
+ * @param schemaNameFilter Filter on <code>schema name</code>. Pass null to apply no filter.
+ * @param tableNameFilter Filter in <code>table name</code>. Pass null to apply no filter.
+ * @param columnNameFilter Filter in <code>column name</code>. Pass null to apply no filter.
+ * @return
+ */
+ public DrillRpcFuture<GetColumnsResp> getColumns(LikeFilter catalogNameFilter, LikeFilter schemaNameFilter,
+ LikeFilter tableNameFilter, LikeFilter columnNameFilter) {
+ final GetColumnsReq.Builder reqBuilder = GetColumnsReq.newBuilder();
+ if (catalogNameFilter != null) {
+ reqBuilder.setCatalogNameFilter(catalogNameFilter);
+ }
+
+ if (schemaNameFilter != null) {
+ reqBuilder.setSchameNameFilter(schemaNameFilter);
+ }
+
+ if (tableNameFilter != null) {
+ reqBuilder.setTableNameFilter(tableNameFilter);
+ }
+
+ if (columnNameFilter != null) {
+ reqBuilder.setColumnNameFilter(columnNameFilter);
+ }
+
+ return client.send(RpcType.GET_COLUMNS, reqBuilder.build(), GetColumnsResp.class);
+ }
+
+ /**
* Submits a Logical plan for direct execution (bypasses parsing)
*
* @param plan the plan to execute
http://git-wip-us.apache.org/repos/asf/drill/blob/ef6e522c/exec/java-exec/src/main/java/org/apache/drill/exec/ops/QueryContext.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/QueryContext.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/QueryContext.java
index 3ce0633..44e33cb 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/QueryContext.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/QueryContext.java
@@ -19,16 +19,13 @@ package org.apache.drill.exec.ops;
import io.netty.buffer.DrillBuf;
-import java.io.IOException;
import java.util.Collection;
import java.util.List;
-import org.apache.calcite.jdbc.SimpleCalciteSchema;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.drill.common.AutoCloseables;
import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.common.config.LogicalPlanPersistence;
-import org.apache.drill.common.exceptions.DrillRuntimeException;
import org.apache.drill.exec.ExecConstants;
import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
import org.apache.drill.exec.memory.BufferAllocator;
@@ -41,21 +38,22 @@ import org.apache.drill.exec.proto.helper.QueryIdHelper;
import org.apache.drill.exec.rpc.user.UserSession;
import org.apache.drill.exec.server.DrillbitContext;
import org.apache.drill.exec.server.options.OptionManager;
+import org.apache.drill.exec.server.options.OptionValue;
import org.apache.drill.exec.server.options.QueryOptionManager;
-import org.apache.drill.exec.store.AbstractSchema;
import org.apache.drill.exec.store.PartitionExplorer;
import org.apache.drill.exec.store.PartitionExplorerImpl;
import org.apache.drill.exec.store.SchemaConfig;
+import org.apache.drill.exec.store.SchemaConfig.SchemaConfigInfoProvider;
+import org.apache.drill.exec.store.SchemaTreeProvider;
import org.apache.drill.exec.store.StoragePluginRegistry;
import org.apache.drill.exec.testing.ExecutionControls;
-import org.apache.drill.exec.util.ImpersonationUtil;
import org.apache.drill.exec.util.Utilities;
import com.google.common.collect.Lists;
// TODO - consider re-name to PlanningContext, as the query execution context actually appears
// in fragment contexts
-public class QueryContext implements AutoCloseable, OptimizerRulesContext {
+public class QueryContext implements AutoCloseable, OptimizerRulesContext, SchemaConfigInfoProvider {
private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(QueryContext.class);
private final DrillbitContext drillbitContext;
@@ -70,8 +68,7 @@ public class QueryContext implements AutoCloseable, OptimizerRulesContext {
private final ContextInformation contextInformation;
private final QueryContextInformation queryContextInfo;
private final ViewExpansionContext viewExpansionContext;
-
- private final List<SchemaPlus> schemaTreesToClose;
+ private final SchemaTreeProvider schemaTreeProvider;
/*
* Flag to indicate if close has been called, after calling close the first
@@ -97,7 +94,7 @@ public class QueryContext implements AutoCloseable, OptimizerRulesContext {
plannerSettings.getPlanningMemoryLimit());
bufferManager = new BufferManagerImpl(this.allocator);
viewExpansionContext = new ViewExpansionContext(this);
- schemaTreesToClose = Lists.newArrayList();
+ schemaTreeProvider = new SchemaTreeProvider(drillbitContext);
}
@Override
@@ -146,9 +143,7 @@ public class QueryContext implements AutoCloseable, OptimizerRulesContext {
* @return Root of the schema tree.
*/
public SchemaPlus getRootSchema(final String userName) {
- final String schemaUser = isImpersonationEnabled() ? userName : ImpersonationUtil.getProcessUserName();
- final SchemaConfig schemaConfig = SchemaConfig.newBuilder(schemaUser, this).build();
- return getRootSchema(schemaConfig);
+ return schemaTreeProvider.createRootSchema(userName, this);
}
/**
@@ -157,17 +152,7 @@ public class QueryContext implements AutoCloseable, OptimizerRulesContext {
* @return
*/
public SchemaPlus getRootSchema(SchemaConfig schemaConfig) {
- try {
- final SchemaPlus rootSchema = SimpleCalciteSchema.createRootSchema(false);
- drillbitContext.getSchemaFactory().registerSchemas(schemaConfig, rootSchema);
- schemaTreesToClose.add(rootSchema);
- return rootSchema;
- } catch(IOException e) {
- // We can't proceed further without a schema, throw a runtime exception.
- final String errMsg = String.format("Failed to create schema tree: %s", e.getMessage());
- logger.error(errMsg, e);
- throw new DrillRuntimeException(errMsg, e);
- }
+ return schemaTreeProvider.createRootSchema(schemaConfig);
}
/**
@@ -211,10 +196,16 @@ public class QueryContext implements AutoCloseable, OptimizerRulesContext {
return drillbitContext.getFunctionImplementationRegistry();
}
+ @Override
public ViewExpansionContext getViewExpansionContext() {
return viewExpansionContext;
}
+ @Override
+ public OptionValue getOption(String optionKey) {
+ return getOptions().getOption(optionKey);
+ }
+
public boolean isImpersonationEnabled() {
return getConfig().getBoolean(ExecConstants.IMPERSONATION_ENABLED);
}
@@ -256,28 +247,12 @@ public class QueryContext implements AutoCloseable, OptimizerRulesContext {
// allocator from the toClose list.
toClose.add(bufferManager);
toClose.add(allocator);
+ toClose.add(schemaTreeProvider);
- for(SchemaPlus tree : schemaTreesToClose) {
- addSchemasToCloseList(tree, toClose);
- }
-
- AutoCloseables.close(toClose.toArray(new AutoCloseable[0]));
+ AutoCloseables.close(toClose);
}
} finally {
closed = true;
}
}
-
- private void addSchemasToCloseList(final SchemaPlus tree, final List<AutoCloseable> toClose) {
- for(String subSchemaName : tree.getSubSchemaNames()) {
- addSchemasToCloseList(tree.getSubSchema(subSchemaName), toClose);
- }
-
- try {
- AbstractSchema drillSchemaImpl = tree.unwrap(AbstractSchema.class);
- toClose.add(drillSchemaImpl);
- } catch (ClassCastException e) {
- // Ignore as the SchemaPlus is not an implementation of Drill schema.
- }
- }
}
http://git-wip-us.apache.org/repos/asf/drill/blob/ef6e522c/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/UserClient.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/UserClient.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/UserClient.java
index 86abaca..5106787 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/UserClient.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/UserClient.java
@@ -31,7 +31,11 @@ import org.apache.drill.exec.proto.UserBitShared.QueryData;
import org.apache.drill.exec.proto.UserBitShared.QueryId;
import org.apache.drill.exec.proto.UserBitShared.QueryResult;
import org.apache.drill.exec.proto.UserProtos.BitToUserHandshake;
+import org.apache.drill.exec.proto.UserProtos.GetCatalogsResp;
+import org.apache.drill.exec.proto.UserProtos.GetColumnsResp;
import org.apache.drill.exec.proto.UserProtos.GetQueryPlanFragments;
+import org.apache.drill.exec.proto.UserProtos.GetSchemasResp;
+import org.apache.drill.exec.proto.UserProtos.GetTablesResp;
import org.apache.drill.exec.proto.UserProtos.HandshakeStatus;
import org.apache.drill.exec.proto.UserProtos.QueryPlanFragments;
import org.apache.drill.exec.proto.UserProtos.RpcType;
@@ -99,12 +103,20 @@ public class UserClient extends BasicClientWithConnection<RpcType, UserToBitHand
return BitToUserHandshake.getDefaultInstance();
case RpcType.QUERY_HANDLE_VALUE:
return QueryId.getDefaultInstance();
- case RpcType.QUERY_RESULT_VALUE:
- return QueryResult.getDefaultInstance();
+ case RpcType.QUERY_RESULT_VALUE:
+ return QueryResult.getDefaultInstance();
case RpcType.QUERY_DATA_VALUE:
return QueryData.getDefaultInstance();
case RpcType.QUERY_PLAN_FRAGMENTS_VALUE:
return QueryPlanFragments.getDefaultInstance();
+ case RpcType.CATALOGS_VALUE:
+ return GetCatalogsResp.getDefaultInstance();
+ case RpcType.SCHEMAS_VALUE:
+ return GetSchemasResp.getDefaultInstance();
+ case RpcType.TABLES_VALUE:
+ return GetTablesResp.getDefaultInstance();
+ case RpcType.COLUMNS_VALUE:
+ return GetColumnsResp.getDefaultInstance();
}
throw new RpcException(String.format("Unable to deal with RpcType of %d", rpcType));
}
http://git-wip-us.apache.org/repos/asf/drill/blob/ef6e522c/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/UserRpcConfig.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/UserRpcConfig.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/UserRpcConfig.java
index f0cbb22..809ac65 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/UserRpcConfig.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/UserRpcConfig.java
@@ -26,7 +26,15 @@ import org.apache.drill.exec.proto.UserBitShared.QueryData;
import org.apache.drill.exec.proto.UserBitShared.QueryId;
import org.apache.drill.exec.proto.UserBitShared.QueryResult;
import org.apache.drill.exec.proto.UserProtos.BitToUserHandshake;
+import org.apache.drill.exec.proto.UserProtos.GetCatalogsResp;
+import org.apache.drill.exec.proto.UserProtos.GetCatalogsReq;
+import org.apache.drill.exec.proto.UserProtos.GetColumnsReq;
+import org.apache.drill.exec.proto.UserProtos.GetColumnsResp;
import org.apache.drill.exec.proto.UserProtos.GetQueryPlanFragments;
+import org.apache.drill.exec.proto.UserProtos.GetSchemasReq;
+import org.apache.drill.exec.proto.UserProtos.GetSchemasResp;
+import org.apache.drill.exec.proto.UserProtos.GetTablesReq;
+import org.apache.drill.exec.proto.UserProtos.GetTablesResp;
import org.apache.drill.exec.proto.UserProtos.QueryPlanFragments;
import org.apache.drill.exec.proto.UserProtos.RpcType;
import org.apache.drill.exec.proto.UserProtos.RunQuery;
@@ -49,6 +57,10 @@ public class UserRpcConfig {
.add(RpcType.RESUME_PAUSED_QUERY, QueryId.class, RpcType.ACK, Ack.class) // user to bit
.add(RpcType.GET_QUERY_PLAN_FRAGMENTS, GetQueryPlanFragments.class,
RpcType.QUERY_PLAN_FRAGMENTS, QueryPlanFragments.class) // user to bit
+ .add(RpcType.GET_CATALOGS, GetCatalogsReq.class, RpcType.CATALOGS, GetCatalogsResp.class) // user to bit
+ .add(RpcType.GET_SCHEMAS, GetSchemasReq.class, RpcType.SCHEMAS, GetSchemasResp.class) // user to bit
+ .add(RpcType.GET_TABLES, GetTablesReq.class, RpcType.TABLES, GetTablesResp.class) // user to bit
+ .add(RpcType.GET_COLUMNS, GetColumnsReq.class, RpcType.COLUMNS, GetColumnsResp.class) // user to bit
.build();
}
http://git-wip-us.apache.org/repos/asf/drill/blob/ef6e522c/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/UserServer.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/UserServer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/UserServer.java
index 09bc5c8..adf7ec4 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/UserServer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/UserServer.java
@@ -38,7 +38,11 @@ import org.apache.drill.exec.proto.GeneralRPCProtos.RpcMode;
import org.apache.drill.exec.proto.UserBitShared.QueryId;
import org.apache.drill.exec.proto.UserBitShared.QueryResult;
import org.apache.drill.exec.proto.UserProtos.BitToUserHandshake;
+import org.apache.drill.exec.proto.UserProtos.GetCatalogsReq;
+import org.apache.drill.exec.proto.UserProtos.GetColumnsReq;
import org.apache.drill.exec.proto.UserProtos.GetQueryPlanFragments;
+import org.apache.drill.exec.proto.UserProtos.GetSchemasReq;
+import org.apache.drill.exec.proto.UserProtos.GetTablesReq;
import org.apache.drill.exec.proto.UserProtos.HandshakeStatus;
import org.apache.drill.exec.proto.UserProtos.Property;
import org.apache.drill.exec.proto.UserProtos.RpcType;
@@ -51,6 +55,7 @@ import org.apache.drill.exec.rpc.OutboundRpcMessage;
import org.apache.drill.exec.rpc.ProtobufLengthDecoder;
import org.apache.drill.exec.rpc.RemoteConnection;
import org.apache.drill.exec.rpc.Response;
+import org.apache.drill.exec.rpc.ResponseSender;
import org.apache.drill.exec.rpc.RpcException;
import org.apache.drill.exec.rpc.RpcOutcomeListener;
import org.apache.drill.exec.rpc.user.security.UserAuthenticationException;
@@ -101,8 +106,8 @@ public class UserServer extends BasicServer<RpcType, UserServer.UserClientConnec
}
@Override
- protected Response handle(UserClientConnection connection, int rpcType, ByteBuf pBody, ByteBuf dBody)
- throws RpcException {
+ protected void handle(UserClientConnection connection, int rpcType, ByteBuf pBody, ByteBuf dBody,
+ ResponseSender responseSender) throws RpcException {
switch (rpcType) {
case RpcType.RUN_QUERY_VALUE:
@@ -110,7 +115,8 @@ public class UserServer extends BasicServer<RpcType, UserServer.UserClientConnec
try {
final RunQuery query = RunQuery.PARSER.parseFrom(new ByteBufInputStream(pBody));
final QueryId queryId = worker.submitWork(connection, query);
- return new Response(RpcType.QUERY_HANDLE, queryId);
+ responseSender.send(new Response(RpcType.QUERY_HANDLE, queryId));
+ break;
} catch (InvalidProtocolBufferException e) {
throw new RpcException("Failure while decoding RunQuery body.", e);
}
@@ -119,7 +125,8 @@ public class UserServer extends BasicServer<RpcType, UserServer.UserClientConnec
try {
final QueryId queryId = QueryId.PARSER.parseFrom(new ByteBufInputStream(pBody));
final Ack ack = worker.cancelQuery(queryId);
- return new Response(RpcType.ACK, ack);
+ responseSender.send(new Response(RpcType.ACK, ack));
+ break;
} catch (InvalidProtocolBufferException e) {
throw new RpcException("Failure while decoding QueryId body.", e);
}
@@ -128,21 +135,54 @@ public class UserServer extends BasicServer<RpcType, UserServer.UserClientConnec
try {
final QueryId queryId = QueryId.PARSER.parseFrom(new ByteBufInputStream(pBody));
final Ack ack = worker.resumeQuery(queryId);
- return new Response(RpcType.ACK, ack);
+ responseSender.send(new Response(RpcType.ACK, ack));
+ break;
} catch (final InvalidProtocolBufferException e) {
throw new RpcException("Failure while decoding QueryId body.", e);
}
case RpcType.GET_QUERY_PLAN_FRAGMENTS_VALUE:
try {
final GetQueryPlanFragments req = GetQueryPlanFragments.PARSER.parseFrom(new ByteBufInputStream(pBody));
- return new Response(RpcType.QUERY_PLAN_FRAGMENTS, worker.getQueryPlan(connection, req));
+ responseSender.send(new Response(RpcType.QUERY_PLAN_FRAGMENTS, worker.getQueryPlan(connection, req)));
+ break;
} catch(final InvalidProtocolBufferException e) {
throw new RpcException("Failure while decoding GetQueryPlanFragments body.", e);
}
+ case RpcType.GET_CATALOGS_VALUE:
+ try {
+ final GetCatalogsReq req = GetCatalogsReq.PARSER.parseFrom(new ByteBufInputStream(pBody));
+ worker.submitCatalogMetadataWork(connection.getSession(), req, responseSender);
+ break;
+ } catch (final InvalidProtocolBufferException e) {
+ throw new RpcException("Failure while decoding GetCatalogsReq body.", e);
+ }
+ case RpcType.GET_SCHEMAS_VALUE:
+ try {
+ final GetSchemasReq req = GetSchemasReq.PARSER.parseFrom(new ByteBufInputStream(pBody));
+ worker.submitSchemasMetadataWork(connection.getSession(), req, responseSender);
+ break;
+ } catch (final InvalidProtocolBufferException e) {
+ throw new RpcException("Failure while decoding GetSchemasReq body.", e);
+ }
+ case RpcType.GET_TABLES_VALUE:
+ try {
+ final GetTablesReq req = GetTablesReq.PARSER.parseFrom(new ByteBufInputStream(pBody));
+ worker.submitTablesMetadataWork(connection.getSession(), req, responseSender);
+ break;
+ } catch (final InvalidProtocolBufferException e) {
+ throw new RpcException("Failure while decoding GetTablesReq body.", e);
+ }
+ case RpcType.GET_COLUMNS_VALUE:
+ try {
+ final GetColumnsReq req = GetColumnsReq.PARSER.parseFrom(new ByteBufInputStream(pBody));
+ worker.submitColumnsMetadataWork(connection.getSession(), req, responseSender);
+ break;
+ } catch (final InvalidProtocolBufferException e) {
+ throw new RpcException("Failure while decoding GetColumnsReq body.", e);
+ }
default:
throw new UnsupportedOperationException(String.format("UserServer received rpc of unknown type. Type was %d.", rpcType));
}
-
}
public class UserClientConnection extends RemoteConnection {
http://git-wip-us.apache.org/repos/asf/drill/blob/ef6e522c/exec/java-exec/src/main/java/org/apache/drill/exec/store/SchemaConfig.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/SchemaConfig.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/SchemaConfig.java
index 0297945..3e8f1c2 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/SchemaConfig.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/SchemaConfig.java
@@ -19,7 +19,8 @@ package org.apache.drill.exec.store;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
-import org.apache.drill.exec.ops.QueryContext;
+
+import org.apache.calcite.schema.SchemaPlus;
import org.apache.drill.exec.ops.ViewExpansionContext;
import org.apache.drill.exec.server.options.OptionValue;
@@ -28,29 +29,35 @@ import org.apache.drill.exec.server.options.OptionValue;
*/
public class SchemaConfig {
private final String userName;
- private final QueryContext queryContext;
+ private final SchemaConfigInfoProvider provider;
private final boolean ignoreAuthErrors;
- private SchemaConfig(final String userName, final QueryContext queryContext, final boolean ignoreAuthErrors) {
+ private SchemaConfig(final String userName, final SchemaConfigInfoProvider provider, final boolean ignoreAuthErrors) {
this.userName = userName;
- this.queryContext = queryContext;
+ this.provider = provider;
this.ignoreAuthErrors = ignoreAuthErrors;
}
- public static Builder newBuilder(final String userName, final QueryContext queryContext) {
+ /**
+ * Create new builder.
+ * @param userName Name of the user accessing the storage sources.
+ * @param provider Implementation of {@link SchemaConfigInfoProvider}
+ * @return
+ */
+ public static Builder newBuilder(final String userName, final SchemaConfigInfoProvider provider) {
Preconditions.checkArgument(!Strings.isNullOrEmpty(userName), "A valid userName is expected");
- Preconditions.checkNotNull(queryContext, "Non-null QueryContext is expected");
- return new Builder(userName, queryContext);
+ Preconditions.checkNotNull(provider, "Non-null SchemaConfigInfoProvider is expected");
+ return new Builder(userName, provider);
}
public static class Builder {
final String userName;
- final QueryContext queryContext;
+ final SchemaConfigInfoProvider provider;
boolean ignoreAuthErrors;
- private Builder(final String userName, final QueryContext queryContext) {
+ private Builder(final String userName, final SchemaConfigInfoProvider provider) {
this.userName = userName;
- this.queryContext = queryContext;
+ this.provider = provider;
}
public Builder setIgnoreAuthErrors(boolean ignoreAuthErrors) {
@@ -59,16 +66,12 @@ public class SchemaConfig {
}
public SchemaConfig build() {
- return new SchemaConfig(userName, queryContext, ignoreAuthErrors);
+ return new SchemaConfig(userName, provider, ignoreAuthErrors);
}
}
- public QueryContext getQueryContext() {
- return queryContext;
- }
-
/**
- * @return User whom to impersonate as while {@link net.hydromatic.optiq.SchemaPlus} instances
+ * @return User to impersonate while the created {@link SchemaPlus} instances
* interact with the underlying storage.
*/
public String getUserName() {
@@ -76,7 +79,7 @@ public class SchemaConfig {
}
/**
- * @return Should ignore if authorization errors are reported while {@link net.hydromatic.optiq.SchemaPlus}
+ * @return Whether to ignore authorization errors reported while {@link SchemaPlus}
* instances interact with the underlying storage.
*/
public boolean getIgnoreAuthErrors() {
@@ -84,10 +87,19 @@ public class SchemaConfig {
}
public OptionValue getOption(String optionKey) {
- return queryContext.getOptions().getOption(optionKey);
+ return provider.getOption(optionKey);
}
public ViewExpansionContext getViewExpansionContext() {
- return queryContext.getViewExpansionContext();
+ return provider.getViewExpansionContext();
+ }
+
+ /**
+ * Interface to implement to provide required info for {@link org.apache.drill.exec.store.SchemaConfig}
+ */
+ public interface SchemaConfigInfoProvider {
+ ViewExpansionContext getViewExpansionContext();
+
+ OptionValue getOption(String optionKey);
}
}
http://git-wip-us.apache.org/repos/asf/drill/blob/ef6e522c/exec/java-exec/src/main/java/org/apache/drill/exec/store/SchemaTreeProvider.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/SchemaTreeProvider.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/SchemaTreeProvider.java
new file mode 100644
index 0000000..d05cc43
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/SchemaTreeProvider.java
@@ -0,0 +1,106 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store;
+
+import org.apache.calcite.jdbc.SimpleCalciteSchema;
+import org.apache.calcite.schema.SchemaPlus;
+import org.apache.drill.common.AutoCloseables;
+import org.apache.drill.common.exceptions.UserException;
+import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.server.DrillbitContext;
+import org.apache.drill.exec.store.SchemaConfig.SchemaConfigInfoProvider;
+import org.apache.drill.exec.util.ImpersonationUtil;
+
+import com.google.common.collect.Lists;
+
+import java.io.IOException;
+import java.util.List;
+
+/**
+ * Class which creates new schema trees. It keeps track of newly created schema trees and closes them safely as
+ * part of {@link #close()}.
+ */
+public class SchemaTreeProvider implements AutoCloseable {
+ private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(SchemaTreeProvider.class);
+
+ private final DrillbitContext dContext;
+ private final List<SchemaPlus> schemaTreesToClose;
+ private final boolean isImpersonationEnabled;
+
+ public SchemaTreeProvider(final DrillbitContext dContext) {
+ this.dContext = dContext;
+ schemaTreesToClose = Lists.newArrayList();
+ isImpersonationEnabled = dContext.getConfig().getBoolean(ExecConstants.IMPERSONATION_ENABLED);
+ }
+
+ /**
+ * Return root schema with schema owner as the given user.
+ *
+ * @param userName Name of the user who is accessing the storage sources.
+ * @param provider {@link SchemaConfigInfoProvider} instance
+ * @return Root of the schema tree.
+ */
+ public SchemaPlus createRootSchema(final String userName, final SchemaConfigInfoProvider provider) {
+ final String schemaUser = isImpersonationEnabled ? userName : ImpersonationUtil.getProcessUserName();
+ final SchemaConfig schemaConfig = SchemaConfig.newBuilder(schemaUser, provider).build();
+ return createRootSchema(schemaConfig);
+ }
+
+ /**
+ * Create and return a SchemaTree with given <i>schemaConfig</i>.
+ * @param schemaConfig
+ * @return
+ */
+ public SchemaPlus createRootSchema(SchemaConfig schemaConfig) {
+ try {
+ final SchemaPlus rootSchema = SimpleCalciteSchema.createRootSchema(false);
+ dContext.getSchemaFactory().registerSchemas(schemaConfig, rootSchema);
+ schemaTreesToClose.add(rootSchema);
+ return rootSchema;
+ } catch(IOException e) {
+ // We can't proceed further without a schema, throw a runtime exception.
+ throw UserException
+ .resourceError(e)
+ .message("Failed to create schema tree.")
+ .build(logger);
+ }
+ }
+
+ @Override
+ public void close() throws Exception {
+ List<AutoCloseable> toClose = Lists.newArrayList();
+ for(SchemaPlus tree : schemaTreesToClose) {
+ addSchemasToCloseList(tree, toClose);
+ }
+
+ AutoCloseables.close(toClose);
+ }
+
+ private static void addSchemasToCloseList(final SchemaPlus tree, final List<AutoCloseable> toClose) {
+ for(String subSchemaName : tree.getSubSchemaNames()) {
+ addSchemasToCloseList(tree.getSubSchema(subSchemaName), toClose);
+ }
+
+ try {
+ AbstractSchema drillSchemaImpl = tree.unwrap(AbstractSchema.class);
+ toClose.add(drillSchemaImpl);
+ } catch (ClassCastException e) {
+ // Ignore as the SchemaPlus is not an implementation of Drill schema.
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/drill/blob/ef6e522c/exec/java-exec/src/main/java/org/apache/drill/exec/store/pojo/PojoRecordReader.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/pojo/PojoRecordReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/pojo/PojoRecordReader.java
index 7feb303..baf07a4 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/pojo/PojoRecordReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/pojo/PojoRecordReader.java
@@ -29,7 +29,6 @@ import org.apache.drill.exec.exception.OutOfMemoryException;
import org.apache.drill.exec.exception.SchemaChangeException;
import org.apache.drill.exec.ops.OperatorContext;
import org.apache.drill.exec.physical.impl.OutputMutator;
-import org.apache.drill.exec.record.MaterializedField;
import org.apache.drill.exec.store.AbstractRecordReader;
import org.apache.drill.exec.store.pojo.Writers.BitWriter;
import org.apache.drill.exec.store.pojo.Writers.DoubleWriter;
@@ -47,24 +46,30 @@ import org.apache.drill.exec.testing.ControlsInjectorFactory;
import org.apache.drill.exec.vector.AllocationHelper;
import org.apache.drill.exec.vector.ValueVector;
+import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
-public class PojoRecordReader<T> extends AbstractRecordReader {
+public class PojoRecordReader<T> extends AbstractRecordReader implements Iterable<T> {
private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(PojoRecordReader.class);
private static final ControlsInjector injector = ControlsInjectorFactory.getInjector(PojoRecordReader.class);
- public final int forJsonIgnore = 1;
-
private final Class<T> pojoClass;
- private final Iterator<T> iterator;
+ private final List<T> pojoObjects;
private PojoWriter[] writers;
private boolean doCurrent;
private T currentPojo;
private OperatorContext operatorContext;
+ private Iterator<T> currentIterator;
+
+ /**
+ * TODO: Clean up the callers to pass the List of POJO objects directly rather than an iterator.
+ * @param pojoClass
+ * @param iterator
+ */
public PojoRecordReader(Class<T> pojoClass, Iterator<T> iterator) {
this.pojoClass = pojoClass;
- this.iterator = iterator;
+ this.pojoObjects = ImmutableList.copyOf(iterator);
}
@Override
@@ -118,7 +123,7 @@ public class PojoRecordReader<T> extends AbstractRecordReader {
throw new ExecutionSetupException("Failure while setting up schema for PojoRecordReader.", e);
}
-
+ currentIterator = pojoObjects.iterator();
}
@Override
@@ -146,11 +151,11 @@ public class PojoRecordReader<T> extends AbstractRecordReader {
injector.injectPause(operatorContext.getExecutionControls(), "read-next", logger);
try {
int i =0;
- while (doCurrent || iterator.hasNext()) {
+ while (doCurrent || currentIterator.hasNext()) {
if (doCurrent) {
doCurrent = false;
} else {
- currentPojo = iterator.next();
+ currentPojo = currentIterator.next();
}
if (!allocated) {
@@ -174,6 +179,11 @@ public class PojoRecordReader<T> extends AbstractRecordReader {
}
@Override
+ public Iterator<T> iterator() {
+ return pojoObjects.iterator();
+ }
+
+ @Override
public void close() {
}
}
http://git-wip-us.apache.org/repos/asf/drill/blob/ef6e522c/exec/java-exec/src/main/java/org/apache/drill/exec/work/WorkManager.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/work/WorkManager.java b/exec/java-exec/src/main/java/org/apache/drill/exec/work/WorkManager.java
index e910150..ee11592 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/work/WorkManager.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/work/WorkManager.java
@@ -203,6 +203,14 @@ public class WorkManager implements AutoCloseable {
}
/**
+ * Add a self-contained runnable work to the executor service.
+ * @param runnable
+ */
+ public void addNewWork(final Runnable runnable) {
+ executor.execute(runnable);
+ }
+
+ /**
* Remove the given Foreman from the running query list.
*
* <p>The running query list is a bit of a misnomer, because it doesn't
http://git-wip-us.apache.org/repos/asf/drill/blob/ef6e522c/exec/java-exec/src/main/java/org/apache/drill/exec/work/metadata/MetadataProvider.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/work/metadata/MetadataProvider.java b/exec/java-exec/src/main/java/org/apache/drill/exec/work/metadata/MetadataProvider.java
new file mode 100644
index 0000000..aca54b3
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/work/metadata/MetadataProvider.java
@@ -0,0 +1,486 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.work.metadata;
+
+import static org.apache.drill.exec.store.ischema.InfoSchemaConstants.CATS_COL_CATALOG_NAME;
+import static org.apache.drill.exec.store.ischema.InfoSchemaConstants.SCHS_COL_SCHEMA_NAME;
+import static org.apache.drill.exec.store.ischema.InfoSchemaConstants.SHRD_COL_TABLE_NAME;
+import static org.apache.drill.exec.store.ischema.InfoSchemaConstants.SHRD_COL_TABLE_SCHEMA;
+import static org.apache.drill.exec.store.ischema.InfoSchemaTableType.CATALOGS;
+import static org.apache.drill.exec.store.ischema.InfoSchemaTableType.COLUMNS;
+import static org.apache.drill.exec.store.ischema.InfoSchemaTableType.SCHEMATA;
+import static org.apache.drill.exec.store.ischema.InfoSchemaTableType.TABLES;
+
+import java.util.UUID;
+
+import org.apache.calcite.schema.SchemaPlus;
+import org.apache.drill.common.exceptions.ErrorHelper;
+import org.apache.drill.exec.ops.ViewExpansionContext;
+import org.apache.drill.exec.proto.UserBitShared.DrillPBError;
+import org.apache.drill.exec.proto.UserBitShared.DrillPBError.ErrorType;
+import org.apache.drill.exec.proto.UserProtos.CatalogMetadata;
+import org.apache.drill.exec.proto.UserProtos.ColumnMetadata;
+import org.apache.drill.exec.proto.UserProtos.GetCatalogsResp;
+import org.apache.drill.exec.proto.UserProtos.GetCatalogsReq;
+import org.apache.drill.exec.proto.UserProtos.GetColumnsReq;
+import org.apache.drill.exec.proto.UserProtos.GetColumnsResp;
+import org.apache.drill.exec.proto.UserProtos.GetSchemasReq;
+import org.apache.drill.exec.proto.UserProtos.GetSchemasResp;
+import org.apache.drill.exec.proto.UserProtos.GetTablesReq;
+import org.apache.drill.exec.proto.UserProtos.GetTablesResp;
+import org.apache.drill.exec.proto.UserProtos.LikeFilter;
+import org.apache.drill.exec.proto.UserProtos.RequestStatus;
+import org.apache.drill.exec.proto.UserProtos.RpcType;
+import org.apache.drill.exec.proto.UserProtos.SchemaMetadata;
+import org.apache.drill.exec.proto.UserProtos.TableMetadata;
+import org.apache.drill.exec.rpc.Response;
+import org.apache.drill.exec.rpc.ResponseSender;
+import org.apache.drill.exec.rpc.user.UserSession;
+import org.apache.drill.exec.server.DrillbitContext;
+import org.apache.drill.exec.server.options.OptionValue;
+import org.apache.drill.exec.store.SchemaConfig.SchemaConfigInfoProvider;
+import org.apache.drill.exec.store.SchemaTreeProvider;
+import org.apache.drill.exec.store.ischema.InfoSchemaConstants;
+import org.apache.drill.exec.store.ischema.InfoSchemaFilter;
+import org.apache.drill.exec.store.ischema.InfoSchemaFilter.ConstantExprNode;
+import org.apache.drill.exec.store.ischema.InfoSchemaFilter.ExprNode;
+import org.apache.drill.exec.store.ischema.InfoSchemaFilter.FieldExprNode;
+import org.apache.drill.exec.store.ischema.InfoSchemaFilter.FunctionExprNode;
+import org.apache.drill.exec.store.ischema.InfoSchemaTableType;
+import org.apache.drill.exec.store.ischema.Records.Catalog;
+import org.apache.drill.exec.store.ischema.Records.Column;
+import org.apache.drill.exec.store.ischema.Records.Schema;
+import org.apache.drill.exec.store.ischema.Records.Table;
+import org.apache.drill.exec.store.pojo.PojoRecordReader;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.ImmutableList;
+
+/**
+ * Contains worker {@link Runnable} classes for providing the metadata and related helper methods.
+ */
+public class MetadataProvider {
+ private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(MetadataProvider.class);
+
+ private static final String LIKE_FUNCTION = "like";
+ private static final String AND_FUNCTION = "booleanand";
+ private static final String OR_FUNCTION = "booleanor";
+
+ /**
+ * @return Runnable that fetches the catalog metadata for given {@link GetCatalogsReq} and sends response at the end.
+ */
+ public static Runnable catalogs(final UserSession session, final DrillbitContext dContext,
+ final GetCatalogsReq req, final ResponseSender responseSender) {
+ return new CatalogsProvider(session, dContext, req, responseSender);
+ }
+
+ /**
+ * @return Runnable that fetches the schema metadata for given {@link GetSchemasReq} and sends response at the end.
+ */
+ public static Runnable schemas(final UserSession session, final DrillbitContext dContext,
+ final GetSchemasReq req, final ResponseSender responseSender) {
+ return new SchemasProvider(session, dContext, req, responseSender);
+ }
+
+ /**
+ * @return Runnable that fetches the table metadata for given {@link GetTablesReq} and sends response at the end.
+ */
+ public static Runnable tables(final UserSession session, final DrillbitContext dContext,
+ final GetTablesReq req, final ResponseSender responseSender) {
+ return new TablesProvider(session, dContext, req, responseSender);
+ }
+
+ /**
+ * @return Runnable that fetches the column metadata for given {@link GetColumnsReq} and sends response at the end.
+ */
+ public static Runnable columns(final UserSession session, final DrillbitContext dContext,
+ final GetColumnsReq req, final ResponseSender responseSender) {
+ return new ColumnsProvider(session, dContext, req, responseSender);
+ }
+
+ /**
+ * Super class for all metadata provider runnable classes.
+ */
+ private abstract static class MetadataRunnable implements Runnable {
+ protected final UserSession session;
+ private final ResponseSender responseSender;
+ private final DrillbitContext dContext;
+
+ private MetadataRunnable(final UserSession session, final DrillbitContext dContext,
+ final ResponseSender responseSender) {
+ this.session = Preconditions.checkNotNull(session);
+ this.dContext = Preconditions.checkNotNull(dContext);
+ this.responseSender = Preconditions.checkNotNull(responseSender);
+ }
+
+ @Override
+ public void run() {
+ try(SchemaTreeProvider schemaTreeProvider = new SchemaTreeProvider(dContext)) {
+ responseSender.send(runInternal(session, schemaTreeProvider));
+ } catch (final Throwable error) {
+ logger.error("Unhandled metadata provider error", error);
+ }
+ }
+
+ /**
+ * @return A {@link Response} message. Response must be returned in any case.
+ */
+ protected abstract Response runInternal(UserSession session, SchemaTreeProvider schemaProvider);
+ }
+
+ /**
+ * Runnable that fetches the catalog metadata for given {@link GetCatalogsReq} and sends response at the end.
+ */
+ private static class CatalogsProvider extends MetadataRunnable {
+ private final GetCatalogsReq req;
+
+ public CatalogsProvider(final UserSession session, final DrillbitContext dContext,
+ final GetCatalogsReq req, final ResponseSender responseSender) {
+ super(session, dContext, responseSender);
+ this.req = Preconditions.checkNotNull(req);
+ }
+
+ @Override
+ protected Response runInternal(final UserSession session, final SchemaTreeProvider schemaProvider) {
+ final GetCatalogsResp.Builder respBuilder = GetCatalogsResp.newBuilder();
+
+ final InfoSchemaFilter filter = createInfoSchemaFilter(
+ req.hasCatalogNameFilter() ? req.getCatalogNameFilter() : null, null, null, null);
+
+ try {
+ final PojoRecordReader<Catalog> records =
+ (PojoRecordReader<Catalog>) getPojoRecordReader(CATALOGS, filter, schemaProvider, session);
+
+ for(Catalog c : records) {
+ final CatalogMetadata.Builder catBuilder = CatalogMetadata.newBuilder();
+ catBuilder.setCatalogName(c.CATALOG_NAME);
+ catBuilder.setDescription(c.CATALOG_DESCRIPTION);
+ catBuilder.setConnect(c.CATALOG_CONNECT);
+
+ respBuilder.addCatalogs(catBuilder.build());
+ }
+
+ respBuilder.setStatus(RequestStatus.OK);
+ } catch (Throwable e) {
+ respBuilder.setStatus(RequestStatus.FAILED);
+ respBuilder.setError(createPBError("get catalogs", e));
+ } finally {
+ return new Response(RpcType.CATALOGS, respBuilder.build());
+ }
+ }
+ }
+
+ private static class SchemasProvider extends MetadataRunnable {
+ private final GetSchemasReq req;
+
+ private SchemasProvider(final UserSession session, final DrillbitContext dContext,
+ final GetSchemasReq req, final ResponseSender responseSender) {
+ super(session, dContext, responseSender);
+ this.req = Preconditions.checkNotNull(req);
+ }
+
+ @Override
+ protected Response runInternal(final UserSession session, final SchemaTreeProvider schemaProvider) {
+ final GetSchemasResp.Builder respBuilder = GetSchemasResp.newBuilder();
+
+ final InfoSchemaFilter filter = createInfoSchemaFilter(
+ req.hasCatalogNameFilter() ? req.getCatalogNameFilter() : null,
+ req.hasSchameNameFilter() ? req.getSchameNameFilter() : null,
+ null, null);
+
+ try {
+ final PojoRecordReader<Schema> records = (PojoRecordReader<Schema>)
+ getPojoRecordReader(SCHEMATA, filter, schemaProvider, session);
+
+ for(Schema s : records) {
+ final SchemaMetadata.Builder schemaBuilder = SchemaMetadata.newBuilder();
+ schemaBuilder.setCatalogName(s.CATALOG_NAME);
+ schemaBuilder.setSchemaName(s.SCHEMA_NAME);
+ schemaBuilder.setOwner(s.SCHEMA_OWNER);
+ schemaBuilder.setType(s.TYPE);
+ schemaBuilder.setMutable(s.IS_MUTABLE);
+
+ respBuilder.addSchemas(schemaBuilder.build());
+ }
+
+ respBuilder.setStatus(RequestStatus.OK);
+ } catch (Throwable e) {
+ respBuilder.setStatus(RequestStatus.FAILED);
+ respBuilder.setError(createPBError("get schemas", e));
+ } finally {
+ return new Response(RpcType.SCHEMAS, respBuilder.build());
+ }
+ }
+ }
+
+ private static class TablesProvider extends MetadataRunnable {
+ private final GetTablesReq req;
+
+ private TablesProvider(final UserSession session, final DrillbitContext dContext,
+ final GetTablesReq req, final ResponseSender responseSender) {
+ super(session, dContext, responseSender);
+ this.req = Preconditions.checkNotNull(req);
+ }
+
+ @Override
+ protected Response runInternal(final UserSession session, final SchemaTreeProvider schemaProvider) {
+ final GetTablesResp.Builder respBuilder = GetTablesResp.newBuilder();
+
+ final InfoSchemaFilter filter = createInfoSchemaFilter(
+ req.hasCatalogNameFilter() ? req.getCatalogNameFilter() : null,
+ req.hasSchameNameFilter() ? req.getSchameNameFilter() : null,
+ req.hasTableNameFilter() ? req.getTableNameFilter() : null,
+ null);
+
+ try {
+ final PojoRecordReader<Table> records =
+ (PojoRecordReader<Table>)getPojoRecordReader(TABLES, filter, schemaProvider, session);
+
+ for(Table t : records) {
+ final TableMetadata.Builder tableBuilder = TableMetadata.newBuilder();
+ tableBuilder.setCatalogName(t.TABLE_CATALOG);
+ tableBuilder.setSchemaName(t.TABLE_SCHEMA);
+ tableBuilder.setTableName(t.TABLE_NAME);
+ tableBuilder.setType(t.TABLE_TYPE);
+
+ respBuilder.addTables(tableBuilder.build());
+ }
+
+ respBuilder.setStatus(RequestStatus.OK);
+ } catch (Throwable e) {
+ respBuilder.setStatus(RequestStatus.FAILED);
+ respBuilder.setError(createPBError("get tables", e));
+ } finally {
+ return new Response(RpcType.TABLES, respBuilder.build());
+ }
+ }
+ }
+
+ private static class ColumnsProvider extends MetadataRunnable {
+ private final GetColumnsReq req;
+
+ private ColumnsProvider(final UserSession session, final DrillbitContext dContext,
+ final GetColumnsReq req, final ResponseSender responseSender) {
+ super(session, dContext, responseSender);
+ this.req = Preconditions.checkNotNull(req);
+ }
+
+ @Override
+ protected Response runInternal(final UserSession session, final SchemaTreeProvider schemaProvider) {
+ final GetColumnsResp.Builder respBuilder = GetColumnsResp.newBuilder();
+
+ final InfoSchemaFilter filter = createInfoSchemaFilter(
+ req.hasCatalogNameFilter() ? req.getCatalogNameFilter() : null,
+ req.hasSchameNameFilter() ? req.getSchameNameFilter() : null,
+ req.hasTableNameFilter() ? req.getTableNameFilter() : null,
+ req.hasColumnNameFilter() ? req.getColumnNameFilter() : null
+ );
+
+ try {
+ final PojoRecordReader<Column> records =
+ (PojoRecordReader<Column>)getPojoRecordReader(COLUMNS, filter, schemaProvider, session);
+
+ for(Column c : records) {
+ final ColumnMetadata.Builder columnBuilder = ColumnMetadata.newBuilder();
+ columnBuilder.setCatalogName(c.TABLE_CATALOG);
+ columnBuilder.setSchemaName(c.TABLE_SCHEMA);
+ columnBuilder.setTableName(c.TABLE_NAME);
+ columnBuilder.setColumnName(c.COLUMN_NAME);
+ columnBuilder.setOrdinalPosition(c.ORDINAL_POSITION);
+ if (c.COLUMN_DEFAULT != null) {
+ columnBuilder.setDefaultValue(c.COLUMN_DEFAULT);
+ }
+
+ if ("YES".equalsIgnoreCase(c.IS_NULLABLE)) {
+ columnBuilder.setIsNullable(true);
+ } else {
+ columnBuilder.setIsNullable(false);
+ }
+ columnBuilder.setDataType(c.DATA_TYPE);
+ if (c.CHARACTER_MAXIMUM_LENGTH != null) {
+ columnBuilder.setCharMaxLength(c.CHARACTER_MAXIMUM_LENGTH);
+ }
+
+ if (c.CHARACTER_OCTET_LENGTH != null) {
+ columnBuilder.setCharOctetLength(c.CHARACTER_OCTET_LENGTH);
+ }
+
+ if (c.NUMERIC_PRECISION != null) {
+ columnBuilder.setNumericPrecision(c.NUMERIC_PRECISION);
+ }
+
+ if (c.NUMERIC_PRECISION_RADIX != null) {
+ columnBuilder.setNumericPrecisionRadix(c.NUMERIC_PRECISION_RADIX);
+ }
+
+ if (c.DATETIME_PRECISION != null) {
+ columnBuilder.setDateTimePrecision(c.DATETIME_PRECISION);
+ }
+
+ if (c.INTERVAL_TYPE != null) {
+ columnBuilder.setIntervalType(c.INTERVAL_TYPE);
+ }
+
+ if (c.INTERVAL_PRECISION != null) {
+ columnBuilder.setIntervalPrecision(c.INTERVAL_PRECISION);
+ }
+
+ respBuilder.addColumns(columnBuilder.build());
+ }
+
+ respBuilder.setStatus(RequestStatus.OK);
+ } catch (Exception e) {
+ respBuilder.setStatus(RequestStatus.FAILED);
+ respBuilder.setError(createPBError("get columns", e));
+ } finally {
+ return new Response(RpcType.COLUMNS, respBuilder.build());
+ }
+ }
+ }
+
+ /**
+ * Helper method to create a {@link InfoSchemaFilter} that combines the given filters with an AND.
+ * @param catalogNameFilter Optional filter on <code>catalog name</code>
+ * @param schemaNameFilter Optional filter on <code>schema name</code>
+ * @param tableNameFilter Optional filter on <code>table name</code>
+ * @param columnNameFilter Optional filter on <code>column name</code>
+ * @return
+ */
+ private static InfoSchemaFilter createInfoSchemaFilter(final LikeFilter catalogNameFilter,
+ final LikeFilter schemaNameFilter, final LikeFilter tableNameFilter, final LikeFilter columnNameFilter) {
+
+ FunctionExprNode exprNode = createLikeFunctionExprNode(CATS_COL_CATALOG_NAME, catalogNameFilter);
+
+ exprNode = combineFunctions(AND_FUNCTION,
+ exprNode,
+ combineFunctions(OR_FUNCTION,
+ createLikeFunctionExprNode(SHRD_COL_TABLE_SCHEMA, schemaNameFilter),
+ createLikeFunctionExprNode(SCHS_COL_SCHEMA_NAME, schemaNameFilter)
+ )
+ );
+
+ exprNode = combineFunctions(AND_FUNCTION,
+ exprNode,
+ createLikeFunctionExprNode(SHRD_COL_TABLE_NAME, tableNameFilter)
+ );
+
+ exprNode = combineFunctions(AND_FUNCTION,
+ exprNode,
+ createLikeFunctionExprNode(InfoSchemaConstants.COLS_COL_COLUMN_NAME, columnNameFilter)
+ );
+
+ return exprNode != null ? new InfoSchemaFilter(exprNode) : null;
+ }
+
+ /**
+ * Helper method to create {@link FunctionExprNode} from {@link LikeFilter}.
+ * @param fieldName Name of the field on which the like expression is applied.
+ * @param likeFilter
+ * @return {@link FunctionExprNode} for given arguments. Null if the <code>likeFilter</code> is null.
+ */
+ private static FunctionExprNode createLikeFunctionExprNode(String fieldName, LikeFilter likeFilter) {
+ if (likeFilter == null) {
+ return null;
+ }
+
+ return new FunctionExprNode(LIKE_FUNCTION,
+ likeFilter.hasEscape() ?
+ ImmutableList.of(
+ new FieldExprNode(fieldName),
+ new ConstantExprNode(likeFilter.getRegex()),
+ new ConstantExprNode(likeFilter.getEscape())) :
+ ImmutableList.of(
+ new FieldExprNode(fieldName),
+ new ConstantExprNode(likeFilter.getRegex()))
+ );
+ }
+
+ /**
+ * Helper method to combine two {@link FunctionExprNode}s with a given <code>functionName</code>. If one of them is
+ * null, other one is returned as it is.
+ */
+ private static FunctionExprNode combineFunctions(final String functionName,
+ final FunctionExprNode func1, final FunctionExprNode func2) {
+ if (func1 == null) {
+ return func2;
+ }
+
+ if (func2 == null) {
+ return func1;
+ }
+
+ return new FunctionExprNode(functionName, ImmutableList.<ExprNode>of(func1, func2));
+ }
+
+ /**
+ * Helper method to create a {@link PojoRecordReader} for given arguments.
+ * @param tableType
+ * @param filter
+ * @param provider
+ * @param userSession
+ * @return
+ */
+ private static PojoRecordReader getPojoRecordReader(final InfoSchemaTableType tableType, final InfoSchemaFilter filter,
+ final SchemaTreeProvider provider, final UserSession userSession) {
+ final SchemaPlus rootSchema =
+ provider.createRootSchema(userSession.getCredentials().getUserName(), newSchemaConfigInfoProvider(userSession));
+ return tableType.getRecordReader(rootSchema, filter, userSession.getOptions());
+ }
+
+ /**
+ * Helper method to create a {@link SchemaConfigInfoProvider} instance for metadata purposes.
+ * @param session
+ * @return
+ */
+ private static SchemaConfigInfoProvider newSchemaConfigInfoProvider(final UserSession session) {
+ return new SchemaConfigInfoProvider() {
+ @Override
+ public ViewExpansionContext getViewExpansionContext() {
+ // Metadata APIs don't expect to expand the views.
+ throw new UnsupportedOperationException("View expansion context is not supported");
+ }
+
+ @Override
+ public OptionValue getOption(String optionKey) {
+ return session.getOptions().getOption(optionKey);
+ }
+ };
+ }
+
+ /**
+ * Helper method to create {@link DrillPBError} for client response message.
+ * @param failedFunction Brief description of the failed function.
+ * @param ex Exception thrown
+ * @return
+ */
+ private static DrillPBError createPBError(final String failedFunction, final Throwable ex) {
+ final String errorId = UUID.randomUUID().toString();
+ logger.error("Failed to {}. ErrorId: {}", failedFunction, errorId, ex);
+
+ final DrillPBError.Builder builder = DrillPBError.newBuilder();
+ builder.setErrorType(ErrorType.SYSTEM); // Metadata requests shouldn't cause any user errors
+ builder.setErrorId(errorId);
+ if (ex.getMessage() != null) {
+ builder.setMessage(ex.getMessage());
+ }
+
+ builder.setException(ErrorHelper.getWrapper(ex));
+
+ return builder.build();
+ }
+}
http://git-wip-us.apache.org/repos/asf/drill/blob/ef6e522c/exec/java-exec/src/main/java/org/apache/drill/exec/work/user/UserWorker.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/work/user/UserWorker.java b/exec/java-exec/src/main/java/org/apache/drill/exec/work/user/UserWorker.java
index 27126d3..cc614d2 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/work/user/UserWorker.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/work/user/UserWorker.java
@@ -21,16 +21,22 @@ import java.util.concurrent.ThreadLocalRandom;
import org.apache.drill.exec.proto.GeneralRPCProtos.Ack;
import org.apache.drill.exec.proto.UserBitShared.QueryId;
+import org.apache.drill.exec.proto.UserProtos.GetCatalogsReq;
+import org.apache.drill.exec.proto.UserProtos.GetColumnsReq;
import org.apache.drill.exec.proto.UserProtos.GetQueryPlanFragments;
+import org.apache.drill.exec.proto.UserProtos.GetSchemasReq;
+import org.apache.drill.exec.proto.UserProtos.GetTablesReq;
import org.apache.drill.exec.proto.UserProtos.QueryPlanFragments;
import org.apache.drill.exec.proto.UserProtos.RunQuery;
import org.apache.drill.exec.rpc.Acks;
+import org.apache.drill.exec.rpc.ResponseSender;
import org.apache.drill.exec.rpc.user.UserServer.UserClientConnection;
import org.apache.drill.exec.rpc.user.UserSession;
import org.apache.drill.exec.rpc.user.UserSession.QueryCountIncrementer;
import org.apache.drill.exec.server.options.OptionManager;
import org.apache.drill.exec.work.WorkManager.WorkerBee;
import org.apache.drill.exec.work.foreman.Foreman;
+import org.apache.drill.exec.work.metadata.MetadataProvider;
public class UserWorker{
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(UserWorker.class);
@@ -44,7 +50,6 @@ public class UserWorker{
};
public UserWorker(WorkerBee bee) {
- super();
this.bee = bee;
}
@@ -52,7 +57,7 @@ public class UserWorker{
* Helper method to generate QueryId
* @return generated QueryId
*/
- private QueryId queryIdGenerator() {
+ private static QueryId queryIdGenerator() {
ThreadLocalRandom r = ThreadLocalRandom.current();
// create a new queryid where the first four bytes are a growing time (each new value comes earlier in sequence). Last 12 bytes are random.
@@ -97,4 +102,20 @@ public class UserWorker{
final QueryPlanFragments qPlanFragments = new PlanSplitter().planFragments(bee.getContext(), queryId, req, connection);
return qPlanFragments;
}
+
+ /** Enqueues work to serve a catalog metadata request; the response is delivered asynchronously through the sender. */
+ public void submitCatalogMetadataWork(UserSession session, GetCatalogsReq req, ResponseSender sender) {
+ bee.addNewWork(MetadataProvider.catalogs(session, bee.getContext(), req, sender));
+ }
+
+ /** Enqueues work to serve a schema metadata request; the response is delivered asynchronously through the sender. */
+ public void submitSchemasMetadataWork(UserSession session, GetSchemasReq req, ResponseSender sender) {
+ bee.addNewWork(MetadataProvider.schemas(session, bee.getContext(), req, sender));
+ }
+
+ /** Enqueues work to serve a table metadata request; the response is delivered asynchronously through the sender. */
+ public void submitTablesMetadataWork(UserSession session, GetTablesReq req, ResponseSender sender) {
+ bee.addNewWork(MetadataProvider.tables(session, bee.getContext(), req, sender));
+ }
+
+ /** Enqueues work to serve a column metadata request; the response is delivered asynchronously through the sender. */
+ public void submitColumnsMetadataWork(UserSession session, GetColumnsReq req, ResponseSender sender) {
+ bee.addNewWork(MetadataProvider.columns(session, bee.getContext(), req, sender));
+ }
}
http://git-wip-us.apache.org/repos/asf/drill/blob/ef6e522c/exec/java-exec/src/test/java/org/apache/drill/exec/work/metadata/TestMetadataProvider.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/work/metadata/TestMetadataProvider.java b/exec/java-exec/src/test/java/org/apache/drill/exec/work/metadata/TestMetadataProvider.java
new file mode 100644
index 0000000..bd2cdfb
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/work/metadata/TestMetadataProvider.java
@@ -0,0 +1,308 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.work.metadata;
+
+import static org.apache.drill.exec.store.ischema.InfoSchemaConstants.IS_CATALOG_CONNECT;
+import static org.apache.drill.exec.store.ischema.InfoSchemaConstants.IS_CATALOG_DESCR;
+import static org.apache.drill.exec.store.ischema.InfoSchemaConstants.IS_CATALOG_NAME;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+import java.util.List;
+
+import org.apache.drill.BaseTestQuery;
+import org.apache.drill.exec.proto.UserProtos.CatalogMetadata;
+import org.apache.drill.exec.proto.UserProtos.ColumnMetadata;
+import org.apache.drill.exec.proto.UserProtos.GetCatalogsResp;
+import org.apache.drill.exec.proto.UserProtos.GetColumnsResp;
+import org.apache.drill.exec.proto.UserProtos.GetSchemasResp;
+import org.apache.drill.exec.proto.UserProtos.GetTablesResp;
+import org.apache.drill.exec.proto.UserProtos.LikeFilter;
+import org.apache.drill.exec.proto.UserProtos.RequestStatus;
+import org.apache.drill.exec.proto.UserProtos.SchemaMetadata;
+import org.apache.drill.exec.proto.UserProtos.TableMetadata;
+
+import org.junit.Test;
+
+/**
+ * Tests for metadata provider APIs. NOTE(review): the hard-coded expected counts (schemas, tables, columns) are tied to the default set of test storage plugins and system tables — update them whenever those change.
+ */
+public class TestMetadataProvider extends BaseTestQuery {
+
+ @Test
+ public void catalogs() throws Exception {
+ // test("SELECT * FROM INFORMATION_SCHEMA.CATALOGS"); // SQL equivalent
+
+ GetCatalogsResp resp = client.getCatalogs(null).get();
+
+ assertEquals(RequestStatus.OK, resp.getStatus());
+ List<CatalogMetadata> catalogs = resp.getCatalogsList();
+ assertEquals(1, catalogs.size());
+
+ CatalogMetadata c = catalogs.get(0);
+ assertEquals(IS_CATALOG_NAME, c.getCatalogName());
+ assertEquals(IS_CATALOG_DESCR, c.getDescription());
+ assertEquals(IS_CATALOG_CONNECT, c.getConnect());
+ }
+
+ @Test
+ public void catalogsWithFilter() throws Exception {
+ // test("SELECT * FROM INFORMATION_SCHEMA.CATALOGS " +
+ // "WHERE CATALOG_NAME LIKE '%DRI%' ESCAPE '\\'"); // SQL equivalent
+ GetCatalogsResp resp =
+ client.getCatalogs(LikeFilter.newBuilder().setRegex("%DRI%").setEscape("\\").build()).get();
+
+ assertEquals(RequestStatus.OK, resp.getStatus());
+ List<CatalogMetadata> catalogs = resp.getCatalogsList();
+ assertEquals(1, catalogs.size());
+
+ CatalogMetadata c = catalogs.get(0);
+ assertEquals(IS_CATALOG_NAME, c.getCatalogName());
+ assertEquals(IS_CATALOG_DESCR, c.getDescription());
+ assertEquals(IS_CATALOG_CONNECT, c.getConnect());
+ }
+
+ @Test
+ public void catalogsWithFilterNegative() throws Exception {
+ // test("SELECT * FROM INFORMATION_SCHEMA.CATALOGS
+ // WHERE CATALOG_NAME LIKE '%DRIj\%hgjh%' ESCAPE '\'"); // SQL equivalent of the filter below
+
+ GetCatalogsResp resp =
+ client.getCatalogs(LikeFilter.newBuilder().setRegex("%DRIj\\%hgjh%").setEscape("\\").build()).get();
+
+ assertEquals(RequestStatus.OK, resp.getStatus());
+ List<CatalogMetadata> catalogs = resp.getCatalogsList();
+ assertEquals(0, catalogs.size());
+ }
+
+ @Test
+ public void schemas() throws Exception {
+ // test("SELECT * FROM INFORMATION_SCHEMA.SCHEMATA"); // SQL equivalent
+
+ GetSchemasResp resp = client.getSchemas(null, null).get();
+
+ assertEquals(RequestStatus.OK, resp.getStatus());
+ List<SchemaMetadata> schemas = resp.getSchemasList();
+ assertEquals(9, schemas.size()); // count depends on default test plugins — see class javadoc
+
+ verifySchema("INFORMATION_SCHEMA", schemas);
+ verifySchema("cp.default", schemas);
+ verifySchema("dfs.default", schemas);
+ verifySchema("dfs.root", schemas);
+ verifySchema("dfs.tmp", schemas);
+ verifySchema("dfs_test.default", schemas);
+ verifySchema("dfs_test.home", schemas);
+ verifySchema("dfs_test.tmp", schemas);
+ verifySchema("sys", schemas);
+ }
+
+ @Test
+ public void schemasWithSchemaNameFilter() throws Exception {
+ // test("SELECT * FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME LIKE '%y%'"); // SQL equivalent
+
+ GetSchemasResp resp = client.getSchemas(null, LikeFilter.newBuilder().setRegex("%y%").build()).get();
+
+ assertEquals(RequestStatus.OK, resp.getStatus());
+ List<SchemaMetadata> schemas = resp.getSchemasList();
+ assertEquals(1, schemas.size());
+
+ verifySchema("sys", schemas);
+ }
+
+ @Test
+ public void schemasWithCatalogNameFilterAndSchemaNameFilter() throws Exception {
+
+ // test("SELECT * FROM INFORMATION_SCHEMA.SCHEMATA " +
+ // "WHERE CATALOG_NAME LIKE '%RI%' AND SCHEMA_NAME LIKE '%dfs_test%'"); // SQL equivalent
+
+ GetSchemasResp resp = client.getSchemas(
+ LikeFilter.newBuilder().setRegex("%RI%").build(),
+ LikeFilter.newBuilder().setRegex("%dfs_test%").build()).get();
+
+ assertEquals(RequestStatus.OK, resp.getStatus());
+ List<SchemaMetadata> schemas = resp.getSchemasList();
+ assertEquals(3, schemas.size());
+
+ verifySchema("dfs_test.default", schemas);
+ verifySchema("dfs_test.home", schemas);
+ verifySchema("dfs_test.tmp", schemas);
+ }
+
+ @Test
+ public void tables() throws Exception {
+ // test("SELECT * FROM INFORMATION_SCHEMA.`TABLES`"); // SQL equivalent
+
+ GetTablesResp resp = client.getTables(null, null, null).get();
+
+ assertEquals(RequestStatus.OK, resp.getStatus());
+ List<TableMetadata> tables = resp.getTablesList();
+ assertEquals(11, tables.size()); // count depends on default test plugins — see class javadoc
+
+ verifyTable("INFORMATION_SCHEMA", "CATALOGS", tables);
+ verifyTable("INFORMATION_SCHEMA", "COLUMNS", tables);
+ verifyTable("INFORMATION_SCHEMA", "SCHEMATA", tables);
+ verifyTable("INFORMATION_SCHEMA", "TABLES", tables);
+ verifyTable("INFORMATION_SCHEMA", "VIEWS", tables);
+ verifyTable("sys", "boot", tables);
+ verifyTable("sys", "drillbits", tables);
+ verifyTable("sys", "memory", tables);
+ verifyTable("sys", "options", tables);
+ verifyTable("sys", "threads", tables);
+ verifyTable("sys", "version", tables);
+ }
+
+ @Test
+ public void tablesWithTableNameFilter() throws Exception {
+ // test("SELECT * FROM INFORMATION_SCHEMA.`TABLES` WHERE TABLE_NAME LIKE '%o%'"); // SQL equivalent
+
+ GetTablesResp resp = client.getTables(null, null,
+ LikeFilter.newBuilder().setRegex("%o%").build()).get();
+
+ assertEquals(RequestStatus.OK, resp.getStatus());
+ List<TableMetadata> tables = resp.getTablesList();
+ assertEquals(4, tables.size());
+
+ verifyTable("sys", "boot", tables);
+ verifyTable("sys", "memory", tables);
+ verifyTable("sys", "options", tables);
+ verifyTable("sys", "version", tables);
+ }
+
+ @Test
+ public void tablesWithTableNameFilterAndSchemaNameFilter() throws Exception {
+ // test("SELECT * FROM INFORMATION_SCHEMA.`TABLES` " +
+ // "WHERE TABLE_SCHEMA LIKE '%N\\_S%' ESCAPE '\\' AND TABLE_NAME LIKE '%o%'"); // SQL equivalent
+
+ GetTablesResp resp = client.getTables(null,
+ LikeFilter.newBuilder().setRegex("%N\\_S%").setEscape("\\").build(),
+ LikeFilter.newBuilder().setRegex("%o%").build()).get();
+
+ assertEquals(RequestStatus.OK, resp.getStatus());
+ List<TableMetadata> tables = resp.getTablesList();
+ assertEquals(0, tables.size());
+ }
+
+ @Test
+ public void columns() throws Exception {
+ // test("SELECT * FROM INFORMATION_SCHEMA.COLUMNS"); // SQL equivalent
+
+ GetColumnsResp resp = client.getColumns(null, null, null, null).get();
+
+ assertEquals(RequestStatus.OK, resp.getStatus());
+ List<ColumnMetadata> columns = resp.getColumnsList();
+ assertEquals(70, columns.size()); // count depends on default test plugins — see class javadoc
+ // too many records to verify the output.
+ }
+
+ @Test
+ public void columnsWithColumnNameFilter() throws Exception {
+ // test("SELECT * FROM INFORMATION_SCHEMA.COLUMNS WHERE COLUMN_NAME LIKE '%\\_p%' ESCAPE '\\'"); // SQL equivalent
+
+ GetColumnsResp resp = client.getColumns(null, null, null,
+ LikeFilter.newBuilder().setRegex("%\\_p%").setEscape("\\").build()).get();
+
+ assertEquals(RequestStatus.OK, resp.getStatus());
+ List<ColumnMetadata> columns = resp.getColumnsList();
+ assertEquals(5, columns.size());
+
+ verifyColumn("sys", "drillbits", "user_port", columns);
+ verifyColumn("sys", "drillbits", "control_port", columns);
+ verifyColumn("sys", "drillbits", "data_port", columns);
+ verifyColumn("sys", "memory", "user_port", columns);
+ verifyColumn("sys", "threads", "user_port", columns);
+ }
+
+ @Test
+ public void columnsWithColumnNameFilterAndTableNameFilter() throws Exception {
+ // test("SELECT * FROM INFORMATION_SCHEMA.COLUMNS
+ // WHERE TABLE_NAME LIKE '%bits' AND COLUMN_NAME LIKE '%\\_p%' ESCAPE '\\'"); // SQL equivalent
+
+ GetColumnsResp resp = client.getColumns(null, null,
+ LikeFilter.newBuilder().setRegex("%bits").build(),
+ LikeFilter.newBuilder().setRegex("%\\_p%").setEscape("\\").build()).get();
+
+ assertEquals(RequestStatus.OK, resp.getStatus());
+ List<ColumnMetadata> columns = resp.getColumnsList();
+ assertEquals(3, columns.size());
+
+ verifyColumn("sys", "drillbits", "user_port", columns);
+ verifyColumn("sys", "drillbits", "control_port", columns);
+ verifyColumn("sys", "drillbits", "data_port", columns);
+ }
+
+ @Test
+ public void columnsWithAllSupportedFilters() throws Exception {
+ // test("SELECT * FROM INFORMATION_SCHEMA.COLUMNS WHERE " +
+ // "TABLE_CATALOG LIKE '%ILL' AND TABLE_SCHEMA LIKE 'sys' AND " +
+ // "TABLE_NAME LIKE '%bits' AND COLUMN_NAME LIKE '%\\_p%' ESCAPE '\\'"); // SQL equivalent
+
+ GetColumnsResp resp = client.getColumns(
+ LikeFilter.newBuilder().setRegex("%ILL").build(),
+ LikeFilter.newBuilder().setRegex("sys").build(),
+ LikeFilter.newBuilder().setRegex("%bits").build(),
+ LikeFilter.newBuilder().setRegex("%\\_p%").setEscape("\\").build()).get();
+
+ assertEquals(RequestStatus.OK, resp.getStatus());
+ List<ColumnMetadata> columns = resp.getColumnsList();
+ assertEquals(3, columns.size());
+
+ verifyColumn("sys", "drillbits", "user_port", columns);
+ verifyColumn("sys", "drillbits", "control_port", columns);
+ verifyColumn("sys", "drillbits", "data_port", columns);
+ }
+
+ /** Helper method to verify schema contents; fails the test if the named schema is absent. */
+ private static void verifySchema(String schemaName, List<SchemaMetadata> schemas) {
+ for(SchemaMetadata schema : schemas) {
+ if (schemaName.equals(schema.getSchemaName())) {
+ assertEquals(IS_CATALOG_NAME, schema.getCatalogName());
+ return;
+ }
+ }
+
+ fail("Failed to find schema '" + schemaName + "' in results: " + schemas);
+ }
+
+ /** Helper method to verify table contents; fails the test if the named table is absent. */
+ private static void verifyTable(String schemaName, String tableName, List<TableMetadata> tables) {
+
+ for(TableMetadata table : tables) {
+ if (tableName.equals(table.getTableName()) && schemaName.equals(table.getSchemaName())) {
+ assertEquals(IS_CATALOG_NAME, table.getCatalogName());
+ return;
+ }
+ }
+
+ fail(String.format("Failed to find table '%s.%s' in results: %s", schemaName, tableName, tables));
+ }
+
+ /** Helper method to verify column contents; fails the test if the named column is absent. */
+ private static void verifyColumn(String schemaName, String tableName, String columnName,
+ List<ColumnMetadata> columns) {
+
+ for(ColumnMetadata column : columns) {
+ if (schemaName.equals(column.getSchemaName()) && tableName.equals(column.getTableName()) &&
+ columnName.equals(column.getColumnName())) {
+ assertEquals(IS_CATALOG_NAME, column.getCatalogName());
+ return;
+ }
+ }
+
+ fail(String.format("Failed to find column '%s.%s.%s' in results: %s", schemaName, tableName, columnName, columns));
+ }
+}
http://git-wip-us.apache.org/repos/asf/drill/blob/ef6e522c/exec/jdbc-all/pom.xml
----------------------------------------------------------------------
diff --git a/exec/jdbc-all/pom.xml b/exec/jdbc-all/pom.xml
index fe6e5cd..49dbb3e 100644
--- a/exec/jdbc-all/pom.xml
+++ b/exec/jdbc-all/pom.xml
@@ -442,7 +442,7 @@
This is likely due to you adding new dependencies to a java-exec and not updating the excludes in this module. This is important as it minimizes the size of the dependency of Drill application users.
</message>
- <maxsize>20000000</maxsize>
+ <maxsize>21000000</maxsize>
<minsize>15000000</minsize>
<files>
<file>${project.build.directory}/drill-jdbc-all-${project.version}.jar</file>