You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by ga...@apache.org on 2018/02/12 18:42:50 UTC
[40/50] [abbrv] hive git commit: HIVE-17990 Add Thrift and DB storage
for Schema Registry objects
http://git-wip-us.apache.org/repos/asf/hive/blob/a9e1acaf/standalone-metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java b/standalone-metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
index d5e3527..4659b79 100644
--- a/standalone-metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
+++ b/standalone-metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
@@ -404,6 +404,34 @@ import org.slf4j.LoggerFactory;
public WMCreateOrDropTriggerToPoolMappingResponse create_or_drop_wm_trigger_to_pool_mapping(WMCreateOrDropTriggerToPoolMappingRequest request) throws AlreadyExistsException, NoSuchObjectException, InvalidObjectException, MetaException, org.apache.thrift.TException;
+ public void create_ischema(ISchema schema) throws AlreadyExistsException, NoSuchObjectException, MetaException, org.apache.thrift.TException;
+
+ public void alter_ischema(String schemaName, ISchema newSchema) throws NoSuchObjectException, MetaException, org.apache.thrift.TException;
+
+ public ISchema get_ischema(String schemaName) throws NoSuchObjectException, MetaException, org.apache.thrift.TException;
+
+ public void drop_ischema(String schemaName) throws NoSuchObjectException, InvalidOperationException, MetaException, org.apache.thrift.TException;
+
+ public void add_schema_version(SchemaVersion schemaVersion) throws AlreadyExistsException, NoSuchObjectException, MetaException, org.apache.thrift.TException;
+
+ public SchemaVersion get_schema_version(String schemaName, int version) throws NoSuchObjectException, MetaException, org.apache.thrift.TException;
+
+ public SchemaVersion get_schema_latest_version(String schemaName) throws NoSuchObjectException, MetaException, org.apache.thrift.TException;
+
+ public List<SchemaVersion> get_schema_all_versions(String schemaName) throws NoSuchObjectException, MetaException, org.apache.thrift.TException;
+
+ public void drop_schema_version(String schemaName, int version) throws NoSuchObjectException, MetaException, org.apache.thrift.TException;
+
+ public FindSchemasByColsResp get_schemas_by_cols(FindSchemasByColsRqst rqst) throws MetaException, org.apache.thrift.TException;
+
+ public void map_schema_version_to_serde(String schemaName, int version, String serdeName) throws NoSuchObjectException, MetaException, org.apache.thrift.TException;
+
+ public void set_schema_version_state(String schemaName, int version, SchemaVersionState state) throws NoSuchObjectException, InvalidOperationException, MetaException, org.apache.thrift.TException;
+
+ public void add_serde(SerDeInfo serde) throws AlreadyExistsException, MetaException, org.apache.thrift.TException;
+
+ public SerDeInfo get_serde(String serdeName) throws NoSuchObjectException, MetaException, org.apache.thrift.TException;
+
}
@org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public interface AsyncIface extends com.facebook.fb303.FacebookService .AsyncIface {
@@ -770,6 +798,34 @@ import org.slf4j.LoggerFactory;
public void create_or_drop_wm_trigger_to_pool_mapping(WMCreateOrDropTriggerToPoolMappingRequest request, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
+ public void create_ischema(ISchema schema, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
+
+ public void alter_ischema(String schemaName, ISchema newSchema, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
+
+ public void get_ischema(String schemaName, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
+
+ public void drop_ischema(String schemaName, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
+
+ public void add_schema_version(SchemaVersion schemaVersion, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
+
+ public void get_schema_version(String schemaName, int version, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
+
+ public void get_schema_latest_version(String schemaName, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
+
+ public void get_schema_all_versions(String schemaName, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
+
+ public void drop_schema_version(String schemaName, int version, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
+
+ public void get_schemas_by_cols(FindSchemasByColsRqst rqst, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
+
+ public void map_schema_version_to_serde(String schemaName, int version, String serdeName, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
+
+ public void set_schema_version_state(String schemaName, int version, SchemaVersionState state, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
+
+ public void add_serde(SerDeInfo serde, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
+
+ public void get_serde(String serdeName, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
+
}
@org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class Client extends com.facebook.fb303.FacebookService.Client implements Iface {
@@ -6034,6 +6090,404 @@ import org.slf4j.LoggerFactory;
throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "create_or_drop_wm_trigger_to_pool_mapping failed: unknown result");
}
+ public void create_ischema(ISchema schema) throws AlreadyExistsException, NoSuchObjectException, MetaException, org.apache.thrift.TException
+ {
+ send_create_ischema(schema);
+ recv_create_ischema();
+ }
+
+ public void send_create_ischema(ISchema schema) throws org.apache.thrift.TException
+ {
+ create_ischema_args args = new create_ischema_args();
+ args.setSchema(schema);
+ sendBase("create_ischema", args);
+ }
+
+ public void recv_create_ischema() throws AlreadyExistsException, NoSuchObjectException, MetaException, org.apache.thrift.TException
+ {
+ create_ischema_result result = new create_ischema_result();
+ receiveBase(result, "create_ischema");
+ if (result.o1 != null) {
+ throw result.o1;
+ }
+ if (result.o2 != null) {
+ throw result.o2;
+ }
+ if (result.o3 != null) {
+ throw result.o3;
+ }
+ return;
+ }
+
+ public void alter_ischema(String schemaName, ISchema newSchema) throws NoSuchObjectException, MetaException, org.apache.thrift.TException
+ {
+ send_alter_ischema(schemaName, newSchema);
+ recv_alter_ischema();
+ }
+
+ public void send_alter_ischema(String schemaName, ISchema newSchema) throws org.apache.thrift.TException
+ {
+ alter_ischema_args args = new alter_ischema_args();
+ args.setSchemaName(schemaName);
+ args.setNewSchema(newSchema);
+ sendBase("alter_ischema", args);
+ }
+
+ public void recv_alter_ischema() throws NoSuchObjectException, MetaException, org.apache.thrift.TException
+ {
+ alter_ischema_result result = new alter_ischema_result();
+ receiveBase(result, "alter_ischema");
+ if (result.o1 != null) {
+ throw result.o1;
+ }
+ if (result.o2 != null) {
+ throw result.o2;
+ }
+ return;
+ }
+
+ public ISchema get_ischema(String schemaName) throws NoSuchObjectException, MetaException, org.apache.thrift.TException
+ {
+ send_get_ischema(schemaName);
+ return recv_get_ischema();
+ }
+
+ public void send_get_ischema(String schemaName) throws org.apache.thrift.TException
+ {
+ get_ischema_args args = new get_ischema_args();
+ args.setSchemaName(schemaName);
+ sendBase("get_ischema", args);
+ }
+
+ public ISchema recv_get_ischema() throws NoSuchObjectException, MetaException, org.apache.thrift.TException
+ {
+ get_ischema_result result = new get_ischema_result();
+ receiveBase(result, "get_ischema");
+ if (result.isSetSuccess()) {
+ return result.success;
+ }
+ if (result.o1 != null) {
+ throw result.o1;
+ }
+ if (result.o2 != null) {
+ throw result.o2;
+ }
+ throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "get_ischema failed: unknown result");
+ }
+
+ public void drop_ischema(String schemaName) throws NoSuchObjectException, InvalidOperationException, MetaException, org.apache.thrift.TException
+ {
+ send_drop_ischema(schemaName);
+ recv_drop_ischema();
+ }
+
+ public void send_drop_ischema(String schemaName) throws org.apache.thrift.TException
+ {
+ drop_ischema_args args = new drop_ischema_args();
+ args.setSchemaName(schemaName);
+ sendBase("drop_ischema", args);
+ }
+
+ public void recv_drop_ischema() throws NoSuchObjectException, InvalidOperationException, MetaException, org.apache.thrift.TException
+ {
+ drop_ischema_result result = new drop_ischema_result();
+ receiveBase(result, "drop_ischema");
+ if (result.o1 != null) {
+ throw result.o1;
+ }
+ if (result.o2 != null) {
+ throw result.o2;
+ }
+ if (result.o3 != null) {
+ throw result.o3;
+ }
+ return;
+ }
+
+ public void add_schema_version(SchemaVersion schemaVersion) throws AlreadyExistsException, NoSuchObjectException, MetaException, org.apache.thrift.TException
+ {
+ send_add_schema_version(schemaVersion);
+ recv_add_schema_version();
+ }
+
+ public void send_add_schema_version(SchemaVersion schemaVersion) throws org.apache.thrift.TException
+ {
+ add_schema_version_args args = new add_schema_version_args();
+ args.setSchemaVersion(schemaVersion);
+ sendBase("add_schema_version", args);
+ }
+
+ public void recv_add_schema_version() throws AlreadyExistsException, NoSuchObjectException, MetaException, org.apache.thrift.TException
+ {
+ add_schema_version_result result = new add_schema_version_result();
+ receiveBase(result, "add_schema_version");
+ if (result.o1 != null) {
+ throw result.o1;
+ }
+ if (result.o2 != null) {
+ throw result.o2;
+ }
+ if (result.o3 != null) {
+ throw result.o3;
+ }
+ return;
+ }
+
+ public SchemaVersion get_schema_version(String schemaName, int version) throws NoSuchObjectException, MetaException, org.apache.thrift.TException
+ {
+ send_get_schema_version(schemaName, version);
+ return recv_get_schema_version();
+ }
+
+ public void send_get_schema_version(String schemaName, int version) throws org.apache.thrift.TException
+ {
+ get_schema_version_args args = new get_schema_version_args();
+ args.setSchemaName(schemaName);
+ args.setVersion(version);
+ sendBase("get_schema_version", args);
+ }
+
+ public SchemaVersion recv_get_schema_version() throws NoSuchObjectException, MetaException, org.apache.thrift.TException
+ {
+ get_schema_version_result result = new get_schema_version_result();
+ receiveBase(result, "get_schema_version");
+ if (result.isSetSuccess()) {
+ return result.success;
+ }
+ if (result.o1 != null) {
+ throw result.o1;
+ }
+ if (result.o2 != null) {
+ throw result.o2;
+ }
+ throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "get_schema_version failed: unknown result");
+ }
+
+ public SchemaVersion get_schema_latest_version(String schemaName) throws NoSuchObjectException, MetaException, org.apache.thrift.TException
+ {
+ send_get_schema_latest_version(schemaName);
+ return recv_get_schema_latest_version();
+ }
+
+ public void send_get_schema_latest_version(String schemaName) throws org.apache.thrift.TException
+ {
+ get_schema_latest_version_args args = new get_schema_latest_version_args();
+ args.setSchemaName(schemaName);
+ sendBase("get_schema_latest_version", args);
+ }
+
+ public SchemaVersion recv_get_schema_latest_version() throws NoSuchObjectException, MetaException, org.apache.thrift.TException
+ {
+ get_schema_latest_version_result result = new get_schema_latest_version_result();
+ receiveBase(result, "get_schema_latest_version");
+ if (result.isSetSuccess()) {
+ return result.success;
+ }
+ if (result.o1 != null) {
+ throw result.o1;
+ }
+ if (result.o2 != null) {
+ throw result.o2;
+ }
+ throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "get_schema_latest_version failed: unknown result");
+ }
+
+ public List<SchemaVersion> get_schema_all_versions(String schemaName) throws NoSuchObjectException, MetaException, org.apache.thrift.TException
+ {
+ send_get_schema_all_versions(schemaName);
+ return recv_get_schema_all_versions();
+ }
+
+ public void send_get_schema_all_versions(String schemaName) throws org.apache.thrift.TException
+ {
+ get_schema_all_versions_args args = new get_schema_all_versions_args();
+ args.setSchemaName(schemaName);
+ sendBase("get_schema_all_versions", args);
+ }
+
+ public List<SchemaVersion> recv_get_schema_all_versions() throws NoSuchObjectException, MetaException, org.apache.thrift.TException
+ {
+ get_schema_all_versions_result result = new get_schema_all_versions_result();
+ receiveBase(result, "get_schema_all_versions");
+ if (result.isSetSuccess()) {
+ return result.success;
+ }
+ if (result.o1 != null) {
+ throw result.o1;
+ }
+ if (result.o2 != null) {
+ throw result.o2;
+ }
+ throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "get_schema_all_versions failed: unknown result");
+ }
+
+ public void drop_schema_version(String schemaName, int version) throws NoSuchObjectException, MetaException, org.apache.thrift.TException
+ {
+ send_drop_schema_version(schemaName, version);
+ recv_drop_schema_version();
+ }
+
+ public void send_drop_schema_version(String schemaName, int version) throws org.apache.thrift.TException
+ {
+ drop_schema_version_args args = new drop_schema_version_args();
+ args.setSchemaName(schemaName);
+ args.setVersion(version);
+ sendBase("drop_schema_version", args);
+ }
+
+ public void recv_drop_schema_version() throws NoSuchObjectException, MetaException, org.apache.thrift.TException
+ {
+ drop_schema_version_result result = new drop_schema_version_result();
+ receiveBase(result, "drop_schema_version");
+ if (result.o1 != null) {
+ throw result.o1;
+ }
+ if (result.o2 != null) {
+ throw result.o2;
+ }
+ return;
+ }
+
+ public FindSchemasByColsResp get_schemas_by_cols(FindSchemasByColsRqst rqst) throws MetaException, org.apache.thrift.TException
+ {
+ send_get_schemas_by_cols(rqst);
+ return recv_get_schemas_by_cols();
+ }
+
+ public void send_get_schemas_by_cols(FindSchemasByColsRqst rqst) throws org.apache.thrift.TException
+ {
+ get_schemas_by_cols_args args = new get_schemas_by_cols_args();
+ args.setRqst(rqst);
+ sendBase("get_schemas_by_cols", args);
+ }
+
+ public FindSchemasByColsResp recv_get_schemas_by_cols() throws MetaException, org.apache.thrift.TException
+ {
+ get_schemas_by_cols_result result = new get_schemas_by_cols_result();
+ receiveBase(result, "get_schemas_by_cols");
+ if (result.isSetSuccess()) {
+ return result.success;
+ }
+ if (result.o1 != null) {
+ throw result.o1;
+ }
+ throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "get_schemas_by_cols failed: unknown result");
+ }
+
+ public void map_schema_version_to_serde(String schemaName, int version, String serdeName) throws NoSuchObjectException, MetaException, org.apache.thrift.TException
+ {
+ send_map_schema_version_to_serde(schemaName, version, serdeName);
+ recv_map_schema_version_to_serde();
+ }
+
+ public void send_map_schema_version_to_serde(String schemaName, int version, String serdeName) throws org.apache.thrift.TException
+ {
+ map_schema_version_to_serde_args args = new map_schema_version_to_serde_args();
+ args.setSchemaName(schemaName);
+ args.setVersion(version);
+ args.setSerdeName(serdeName);
+ sendBase("map_schema_version_to_serde", args);
+ }
+
+ public void recv_map_schema_version_to_serde() throws NoSuchObjectException, MetaException, org.apache.thrift.TException
+ {
+ map_schema_version_to_serde_result result = new map_schema_version_to_serde_result();
+ receiveBase(result, "map_schema_version_to_serde");
+ if (result.o1 != null) {
+ throw result.o1;
+ }
+ if (result.o2 != null) {
+ throw result.o2;
+ }
+ return;
+ }
+
+ public void set_schema_version_state(String schemaName, int version, SchemaVersionState state) throws NoSuchObjectException, InvalidOperationException, MetaException, org.apache.thrift.TException
+ {
+ send_set_schema_version_state(schemaName, version, state);
+ recv_set_schema_version_state();
+ }
+
+ public void send_set_schema_version_state(String schemaName, int version, SchemaVersionState state) throws org.apache.thrift.TException
+ {
+ set_schema_version_state_args args = new set_schema_version_state_args();
+ args.setSchemaName(schemaName);
+ args.setVersion(version);
+ args.setState(state);
+ sendBase("set_schema_version_state", args);
+ }
+
+ public void recv_set_schema_version_state() throws NoSuchObjectException, InvalidOperationException, MetaException, org.apache.thrift.TException
+ {
+ set_schema_version_state_result result = new set_schema_version_state_result();
+ receiveBase(result, "set_schema_version_state");
+ if (result.o1 != null) {
+ throw result.o1;
+ }
+ if (result.o2 != null) {
+ throw result.o2;
+ }
+ if (result.o3 != null) {
+ throw result.o3;
+ }
+ return;
+ }
+
+ public void add_serde(SerDeInfo serde) throws AlreadyExistsException, MetaException, org.apache.thrift.TException
+ {
+ send_add_serde(serde);
+ recv_add_serde();
+ }
+
+ public void send_add_serde(SerDeInfo serde) throws org.apache.thrift.TException
+ {
+ add_serde_args args = new add_serde_args();
+ args.setSerde(serde);
+ sendBase("add_serde", args);
+ }
+
+ public void recv_add_serde() throws AlreadyExistsException, MetaException, org.apache.thrift.TException
+ {
+ add_serde_result result = new add_serde_result();
+ receiveBase(result, "add_serde");
+ if (result.o1 != null) {
+ throw result.o1;
+ }
+ if (result.o2 != null) {
+ throw result.o2;
+ }
+ return;
+ }
+
+ public SerDeInfo get_serde(String serdeName) throws NoSuchObjectException, MetaException, org.apache.thrift.TException
+ {
+ send_get_serde(serdeName);
+ return recv_get_serde();
+ }
+
+ public void send_get_serde(String serdeName) throws org.apache.thrift.TException
+ {
+ get_serde_args args = new get_serde_args();
+ args.setSerdeName(serdeName);
+ sendBase("get_serde", args);
+ }
+
+ public SerDeInfo recv_get_serde() throws NoSuchObjectException, MetaException, org.apache.thrift.TException
+ {
+ get_serde_result result = new get_serde_result();
+ receiveBase(result, "get_serde");
+ if (result.isSetSuccess()) {
+ return result.success;
+ }
+ if (result.o1 != null) {
+ throw result.o1;
+ }
+ if (result.o2 != null) {
+ throw result.o2;
+ }
+ throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "get_serde failed: unknown result");
+ }
+
}
@org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class AsyncClient extends com.facebook.fb303.FacebookService.AsyncClient implements AsyncIface {
@org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class Factory implements org.apache.thrift.async.TAsyncClientFactory<AsyncClient> {
@@ -12345,6 +12799,475 @@ import org.slf4j.LoggerFactory;
}
}
+ public void create_ischema(ISchema schema, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
+ checkReady();
+ create_ischema_call method_call = new create_ischema_call(schema, resultHandler, this, ___protocolFactory, ___transport);
+ this.___currentMethod = method_call;
+ ___manager.call(method_call);
+ }
+
+ @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class create_ischema_call extends org.apache.thrift.async.TAsyncMethodCall {
+ private ISchema schema;
+ public create_ischema_call(ISchema schema, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+ super(client, protocolFactory, transport, resultHandler, false);
+ this.schema = schema;
+ }
+
+ public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+ prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("create_ischema", org.apache.thrift.protocol.TMessageType.CALL, 0));
+ create_ischema_args args = new create_ischema_args();
+ args.setSchema(schema);
+ args.write(prot);
+ prot.writeMessageEnd();
+ }
+
+ public void getResult() throws AlreadyExistsException, NoSuchObjectException, MetaException, org.apache.thrift.TException {
+ if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+ throw new IllegalStateException("Method call not finished!");
+ }
+ org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+ org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
+ (new Client(prot)).recv_create_ischema();
+ }
+ }
+
+ public void alter_ischema(String schemaName, ISchema newSchema, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
+ checkReady();
+ alter_ischema_call method_call = new alter_ischema_call(schemaName, newSchema, resultHandler, this, ___protocolFactory, ___transport);
+ this.___currentMethod = method_call;
+ ___manager.call(method_call);
+ }
+
+ @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class alter_ischema_call extends org.apache.thrift.async.TAsyncMethodCall {
+ private String schemaName;
+ private ISchema newSchema;
+ public alter_ischema_call(String schemaName, ISchema newSchema, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+ super(client, protocolFactory, transport, resultHandler, false);
+ this.schemaName = schemaName;
+ this.newSchema = newSchema;
+ }
+
+ public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+ prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("alter_ischema", org.apache.thrift.protocol.TMessageType.CALL, 0));
+ alter_ischema_args args = new alter_ischema_args();
+ args.setSchemaName(schemaName);
+ args.setNewSchema(newSchema);
+ args.write(prot);
+ prot.writeMessageEnd();
+ }
+
+ public void getResult() throws NoSuchObjectException, MetaException, org.apache.thrift.TException {
+ if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+ throw new IllegalStateException("Method call not finished!");
+ }
+ org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+ org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
+ (new Client(prot)).recv_alter_ischema();
+ }
+ }
+
+ public void get_ischema(String schemaName, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
+ checkReady();
+ get_ischema_call method_call = new get_ischema_call(schemaName, resultHandler, this, ___protocolFactory, ___transport);
+ this.___currentMethod = method_call;
+ ___manager.call(method_call);
+ }
+
+ @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class get_ischema_call extends org.apache.thrift.async.TAsyncMethodCall {
+ private String schemaName;
+ public get_ischema_call(String schemaName, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+ super(client, protocolFactory, transport, resultHandler, false);
+ this.schemaName = schemaName;
+ }
+
+ public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+ prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("get_ischema", org.apache.thrift.protocol.TMessageType.CALL, 0));
+ get_ischema_args args = new get_ischema_args();
+ args.setSchemaName(schemaName);
+ args.write(prot);
+ prot.writeMessageEnd();
+ }
+
+ public ISchema getResult() throws NoSuchObjectException, MetaException, org.apache.thrift.TException {
+ if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+ throw new IllegalStateException("Method call not finished!");
+ }
+ org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+ org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
+ return (new Client(prot)).recv_get_ischema();
+ }
+ }
+
+ public void drop_ischema(String schemaName, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
+ checkReady();
+ drop_ischema_call method_call = new drop_ischema_call(schemaName, resultHandler, this, ___protocolFactory, ___transport);
+ this.___currentMethod = method_call;
+ ___manager.call(method_call);
+ }
+
+ @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class drop_ischema_call extends org.apache.thrift.async.TAsyncMethodCall {
+ private String schemaName;
+ public drop_ischema_call(String schemaName, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+ super(client, protocolFactory, transport, resultHandler, false);
+ this.schemaName = schemaName;
+ }
+
+ public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+ prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("drop_ischema", org.apache.thrift.protocol.TMessageType.CALL, 0));
+ drop_ischema_args args = new drop_ischema_args();
+ args.setSchemaName(schemaName);
+ args.write(prot);
+ prot.writeMessageEnd();
+ }
+
+ public void getResult() throws NoSuchObjectException, InvalidOperationException, MetaException, org.apache.thrift.TException {
+ if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+ throw new IllegalStateException("Method call not finished!");
+ }
+ org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+ org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
+ (new Client(prot)).recv_drop_ischema();
+ }
+ }
+
+ public void add_schema_version(SchemaVersion schemaVersion, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
+ checkReady();
+ add_schema_version_call method_call = new add_schema_version_call(schemaVersion, resultHandler, this, ___protocolFactory, ___transport);
+ this.___currentMethod = method_call;
+ ___manager.call(method_call);
+ }
+
+ @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class add_schema_version_call extends org.apache.thrift.async.TAsyncMethodCall {
+ private SchemaVersion schemaVersion;
+ public add_schema_version_call(SchemaVersion schemaVersion, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+ super(client, protocolFactory, transport, resultHandler, false);
+ this.schemaVersion = schemaVersion;
+ }
+
+ public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+ prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("add_schema_version", org.apache.thrift.protocol.TMessageType.CALL, 0));
+ add_schema_version_args args = new add_schema_version_args();
+ args.setSchemaVersion(schemaVersion);
+ args.write(prot);
+ prot.writeMessageEnd();
+ }
+
+ public void getResult() throws AlreadyExistsException, NoSuchObjectException, MetaException, org.apache.thrift.TException {
+ if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+ throw new IllegalStateException("Method call not finished!");
+ }
+ org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+ org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
+ (new Client(prot)).recv_add_schema_version();
+ }
+ }
+
+ // Async client stub: enqueue a get_schema_version RPC; the decoded SchemaVersion
+ // (or a declared exception) is delivered to resultHandler when the response arrives.
+ public void get_schema_version(String schemaName, int version, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
+ checkReady();
+ get_schema_version_call method_call = new get_schema_version_call(schemaName, version, resultHandler, this, ___protocolFactory, ___transport);
+ this.___currentMethod = method_call;
+ ___manager.call(method_call);
+ }
+
+ // One in-flight get_schema_version call: write_args serializes the request frame;
+ // getResult replays the buffered response through a synchronous Client to decode
+ // the SchemaVersion reply (or rethrow NoSuchObjectException/MetaException).
+ @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class get_schema_version_call extends org.apache.thrift.async.TAsyncMethodCall {
+ private String schemaName;
+ private int version;
+ public get_schema_version_call(String schemaName, int version, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+ super(client, protocolFactory, transport, resultHandler, false);
+ this.schemaName = schemaName;
+ this.version = version;
+ }
+
+ public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+ prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("get_schema_version", org.apache.thrift.protocol.TMessageType.CALL, 0));
+ get_schema_version_args args = new get_schema_version_args();
+ args.setSchemaName(schemaName);
+ args.setVersion(version);
+ args.write(prot);
+ prot.writeMessageEnd();
+ }
+
+ // Only valid once the call has fully read its response (RESPONSE_READ state).
+ public SchemaVersion getResult() throws NoSuchObjectException, MetaException, org.apache.thrift.TException {
+ if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+ throw new IllegalStateException("Method call not finished!");
+ }
+ org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+ org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
+ return (new Client(prot)).recv_get_schema_version();
+ }
+ }
+
+ // Async client stub: fetch the latest version of the named schema; result goes to resultHandler.
+ public void get_schema_latest_version(String schemaName, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
+ checkReady();
+ get_schema_latest_version_call method_call = new get_schema_latest_version_call(schemaName, resultHandler, this, ___protocolFactory, ___transport);
+ this.___currentMethod = method_call;
+ ___manager.call(method_call);
+ }
+
+ // One in-flight get_schema_latest_version call: serializes the schema name, then
+ // decodes the SchemaVersion reply from the buffered response frame in getResult.
+ @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class get_schema_latest_version_call extends org.apache.thrift.async.TAsyncMethodCall {
+ private String schemaName;
+ public get_schema_latest_version_call(String schemaName, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+ super(client, protocolFactory, transport, resultHandler, false);
+ this.schemaName = schemaName;
+ }
+
+ public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+ prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("get_schema_latest_version", org.apache.thrift.protocol.TMessageType.CALL, 0));
+ get_schema_latest_version_args args = new get_schema_latest_version_args();
+ args.setSchemaName(schemaName);
+ args.write(prot);
+ prot.writeMessageEnd();
+ }
+
+ public SchemaVersion getResult() throws NoSuchObjectException, MetaException, org.apache.thrift.TException {
+ if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+ throw new IllegalStateException("Method call not finished!");
+ }
+ org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+ org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
+ return (new Client(prot)).recv_get_schema_latest_version();
+ }
+ }
+
+ // Async client stub: fetch every stored version of the named schema; the
+ // List<SchemaVersion> result is delivered to resultHandler.
+ public void get_schema_all_versions(String schemaName, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
+ checkReady();
+ get_schema_all_versions_call method_call = new get_schema_all_versions_call(schemaName, resultHandler, this, ___protocolFactory, ___transport);
+ this.___currentMethod = method_call;
+ ___manager.call(method_call);
+ }
+
+ // One in-flight get_schema_all_versions call: serializes the schema name and
+ // decodes the List<SchemaVersion> reply from the buffered response frame.
+ @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class get_schema_all_versions_call extends org.apache.thrift.async.TAsyncMethodCall {
+ private String schemaName;
+ public get_schema_all_versions_call(String schemaName, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+ super(client, protocolFactory, transport, resultHandler, false);
+ this.schemaName = schemaName;
+ }
+
+ public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+ prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("get_schema_all_versions", org.apache.thrift.protocol.TMessageType.CALL, 0));
+ get_schema_all_versions_args args = new get_schema_all_versions_args();
+ args.setSchemaName(schemaName);
+ args.write(prot);
+ prot.writeMessageEnd();
+ }
+
+ public List<SchemaVersion> getResult() throws NoSuchObjectException, MetaException, org.apache.thrift.TException {
+ if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+ throw new IllegalStateException("Method call not finished!");
+ }
+ org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+ org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
+ return (new Client(prot)).recv_get_schema_all_versions();
+ }
+ }
+
+ // Async client stub: delete one version of the named schema. The call has no
+ // return value; completion or a declared exception reaches resultHandler.
+ public void drop_schema_version(String schemaName, int version, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
+ checkReady();
+ drop_schema_version_call method_call = new drop_schema_version_call(schemaName, version, resultHandler, this, ___protocolFactory, ___transport);
+ this.___currentMethod = method_call;
+ ___manager.call(method_call);
+ }
+
+ // One in-flight drop_schema_version call: void result, so getResult only
+ // re-parses the response to surface NoSuchObjectException/MetaException.
+ @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class drop_schema_version_call extends org.apache.thrift.async.TAsyncMethodCall {
+ private String schemaName;
+ private int version;
+ public drop_schema_version_call(String schemaName, int version, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+ super(client, protocolFactory, transport, resultHandler, false);
+ this.schemaName = schemaName;
+ this.version = version;
+ }
+
+ public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+ prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("drop_schema_version", org.apache.thrift.protocol.TMessageType.CALL, 0));
+ drop_schema_version_args args = new drop_schema_version_args();
+ args.setSchemaName(schemaName);
+ args.setVersion(version);
+ args.write(prot);
+ prot.writeMessageEnd();
+ }
+
+ public void getResult() throws NoSuchObjectException, MetaException, org.apache.thrift.TException {
+ if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+ throw new IllegalStateException("Method call not finished!");
+ }
+ org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+ org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
+ (new Client(prot)).recv_drop_schema_version();
+ }
+ }
+
+ // Async client stub: find schemas matching the column criteria in rqst; the
+ // FindSchemasByColsResp result is delivered to resultHandler.
+ public void get_schemas_by_cols(FindSchemasByColsRqst rqst, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
+ checkReady();
+ get_schemas_by_cols_call method_call = new get_schemas_by_cols_call(rqst, resultHandler, this, ___protocolFactory, ___transport);
+ this.___currentMethod = method_call;
+ ___manager.call(method_call);
+ }
+
+ // One in-flight get_schemas_by_cols call: serializes the request struct and
+ // decodes the FindSchemasByColsResp reply from the buffered response frame.
+ @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class get_schemas_by_cols_call extends org.apache.thrift.async.TAsyncMethodCall {
+ private FindSchemasByColsRqst rqst;
+ public get_schemas_by_cols_call(FindSchemasByColsRqst rqst, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+ super(client, protocolFactory, transport, resultHandler, false);
+ this.rqst = rqst;
+ }
+
+ public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+ prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("get_schemas_by_cols", org.apache.thrift.protocol.TMessageType.CALL, 0));
+ get_schemas_by_cols_args args = new get_schemas_by_cols_args();
+ args.setRqst(rqst);
+ args.write(prot);
+ prot.writeMessageEnd();
+ }
+
+ public FindSchemasByColsResp getResult() throws MetaException, org.apache.thrift.TException {
+ if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+ throw new IllegalStateException("Method call not finished!");
+ }
+ org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+ org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
+ return (new Client(prot)).recv_get_schemas_by_cols();
+ }
+ }
+
+ // Async client stub: associate an existing serde with a specific schema version.
+ // Void result; completion or a declared exception reaches resultHandler.
+ public void map_schema_version_to_serde(String schemaName, int version, String serdeName, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
+ checkReady();
+ map_schema_version_to_serde_call method_call = new map_schema_version_to_serde_call(schemaName, version, serdeName, resultHandler, this, ___protocolFactory, ___transport);
+ this.___currentMethod = method_call;
+ ___manager.call(method_call);
+ }
+
+ // One in-flight map_schema_version_to_serde call: void result, so getResult only
+ // re-parses the response to surface NoSuchObjectException/MetaException.
+ @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class map_schema_version_to_serde_call extends org.apache.thrift.async.TAsyncMethodCall {
+ private String schemaName;
+ private int version;
+ private String serdeName;
+ public map_schema_version_to_serde_call(String schemaName, int version, String serdeName, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+ super(client, protocolFactory, transport, resultHandler, false);
+ this.schemaName = schemaName;
+ this.version = version;
+ this.serdeName = serdeName;
+ }
+
+ public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+ prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("map_schema_version_to_serde", org.apache.thrift.protocol.TMessageType.CALL, 0));
+ map_schema_version_to_serde_args args = new map_schema_version_to_serde_args();
+ args.setSchemaName(schemaName);
+ args.setVersion(version);
+ args.setSerdeName(serdeName);
+ args.write(prot);
+ prot.writeMessageEnd();
+ }
+
+ public void getResult() throws NoSuchObjectException, MetaException, org.apache.thrift.TException {
+ if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+ throw new IllegalStateException("Method call not finished!");
+ }
+ org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+ org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
+ (new Client(prot)).recv_map_schema_version_to_serde();
+ }
+ }
+
+ // Async client stub: transition one schema version to a new SchemaVersionState.
+ // Void result; completion or a declared exception reaches resultHandler.
+ public void set_schema_version_state(String schemaName, int version, SchemaVersionState state, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
+ checkReady();
+ set_schema_version_state_call method_call = new set_schema_version_state_call(schemaName, version, state, resultHandler, this, ___protocolFactory, ___transport);
+ this.___currentMethod = method_call;
+ ___manager.call(method_call);
+ }
+
+ // One in-flight set_schema_version_state call: void result, so getResult only
+ // re-parses the response to surface the declared exceptions.
+ @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class set_schema_version_state_call extends org.apache.thrift.async.TAsyncMethodCall {
+ private String schemaName;
+ private int version;
+ private SchemaVersionState state;
+ public set_schema_version_state_call(String schemaName, int version, SchemaVersionState state, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+ super(client, protocolFactory, transport, resultHandler, false);
+ this.schemaName = schemaName;
+ this.version = version;
+ this.state = state;
+ }
+
+ public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+ prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("set_schema_version_state", org.apache.thrift.protocol.TMessageType.CALL, 0));
+ set_schema_version_state_args args = new set_schema_version_state_args();
+ args.setSchemaName(schemaName);
+ args.setVersion(version);
+ args.setState(state);
+ args.write(prot);
+ prot.writeMessageEnd();
+ }
+
+ public void getResult() throws NoSuchObjectException, InvalidOperationException, MetaException, org.apache.thrift.TException {
+ if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+ throw new IllegalStateException("Method call not finished!");
+ }
+ org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+ org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
+ (new Client(prot)).recv_set_schema_version_state();
+ }
+ }
+
+ // Async client stub: register a new serde in the metastore. Void result;
+ // completion or AlreadyExistsException/MetaException reaches resultHandler.
+ public void add_serde(SerDeInfo serde, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
+ checkReady();
+ add_serde_call method_call = new add_serde_call(serde, resultHandler, this, ___protocolFactory, ___transport);
+ this.___currentMethod = method_call;
+ ___manager.call(method_call);
+ }
+
+ // One in-flight add_serde call: serializes the SerDeInfo and re-parses the
+ // response in getResult to surface declared exceptions.
+ @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class add_serde_call extends org.apache.thrift.async.TAsyncMethodCall {
+ private SerDeInfo serde;
+ public add_serde_call(SerDeInfo serde, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+ super(client, protocolFactory, transport, resultHandler, false);
+ this.serde = serde;
+ }
+
+ public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+ prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("add_serde", org.apache.thrift.protocol.TMessageType.CALL, 0));
+ add_serde_args args = new add_serde_args();
+ args.setSerde(serde);
+ args.write(prot);
+ prot.writeMessageEnd();
+ }
+
+ public void getResult() throws AlreadyExistsException, MetaException, org.apache.thrift.TException {
+ if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+ throw new IllegalStateException("Method call not finished!");
+ }
+ org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+ org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
+ (new Client(prot)).recv_add_serde();
+ }
+ }
+
+ // Async client stub: look up a serde by name; the SerDeInfo result is delivered
+ // to resultHandler.
+ public void get_serde(String serdeName, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
+ checkReady();
+ get_serde_call method_call = new get_serde_call(serdeName, resultHandler, this, ___protocolFactory, ___transport);
+ this.___currentMethod = method_call;
+ ___manager.call(method_call);
+ }
+
+ // One in-flight get_serde call: serializes the serde name and decodes the
+ // SerDeInfo reply from the buffered response frame.
+ @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class get_serde_call extends org.apache.thrift.async.TAsyncMethodCall {
+ private String serdeName;
+ public get_serde_call(String serdeName, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+ super(client, protocolFactory, transport, resultHandler, false);
+ this.serdeName = serdeName;
+ }
+
+ public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+ prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("get_serde", org.apache.thrift.protocol.TMessageType.CALL, 0));
+ get_serde_args args = new get_serde_args();
+ args.setSerdeName(serdeName);
+ args.write(prot);
+ prot.writeMessageEnd();
+ }
+
+ public SerDeInfo getResult() throws NoSuchObjectException, MetaException, org.apache.thrift.TException {
+ if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+ throw new IllegalStateException("Method call not finished!");
+ }
+ org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+ org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
+ return (new Client(prot)).recv_get_serde();
+ }
+ }
+
}
@org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class Processor<I extends Iface> extends com.facebook.fb303.FacebookService.Processor<I> implements org.apache.thrift.TProcessor {
@@ -12539,6 +13462,20 @@ import org.slf4j.LoggerFactory;
processMap.put("create_or_update_wm_mapping", new create_or_update_wm_mapping());
processMap.put("drop_wm_mapping", new drop_wm_mapping());
processMap.put("create_or_drop_wm_trigger_to_pool_mapping", new create_or_drop_wm_trigger_to_pool_mapping());
+ processMap.put("create_ischema", new create_ischema());
+ processMap.put("alter_ischema", new alter_ischema());
+ processMap.put("get_ischema", new get_ischema());
+ processMap.put("drop_ischema", new drop_ischema());
+ processMap.put("add_schema_version", new add_schema_version());
+ processMap.put("get_schema_version", new get_schema_version());
+ processMap.put("get_schema_latest_version", new get_schema_latest_version());
+ processMap.put("get_schema_all_versions", new get_schema_all_versions());
+ processMap.put("drop_schema_version", new drop_schema_version());
+ processMap.put("get_schemas_by_cols", new get_schemas_by_cols());
+ processMap.put("map_schema_version_to_serde", new map_schema_version_to_serde());
+ processMap.put("set_schema_version_state", new set_schema_version_state());
+ processMap.put("add_serde", new add_serde());
+ processMap.put("get_serde", new get_serde());
return processMap;
}
@@ -17186,6 +18123,376 @@ import org.slf4j.LoggerFactory;
}
}
+ // Server-side dispatcher for create_ischema: delegates to the handler and maps
+ // each declared Thrift exception (o1..o3) into the result struct so it is
+ // marshalled back to the client instead of propagating.
+ @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class create_ischema<I extends Iface> extends org.apache.thrift.ProcessFunction<I, create_ischema_args> {
+ public create_ischema() {
+ super("create_ischema");
+ }
+
+ public create_ischema_args getEmptyArgsInstance() {
+ return new create_ischema_args();
+ }
+
+ // Not oneway: a response frame is always written back to the caller.
+ protected boolean isOneway() {
+ return false;
+ }
+
+ public create_ischema_result getResult(I iface, create_ischema_args args) throws org.apache.thrift.TException {
+ create_ischema_result result = new create_ischema_result();
+ try {
+ iface.create_ischema(args.schema);
+ } catch (AlreadyExistsException o1) {
+ result.o1 = o1;
+ } catch (NoSuchObjectException o2) {
+ result.o2 = o2;
+ } catch (MetaException o3) {
+ result.o3 = o3;
+ }
+ return result;
+ }
+ }
+
+ // Server-side dispatcher for alter_ischema: delegates to the handler and maps
+ // declared Thrift exceptions into the result struct for return to the client.
+ @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class alter_ischema<I extends Iface> extends org.apache.thrift.ProcessFunction<I, alter_ischema_args> {
+ public alter_ischema() {
+ super("alter_ischema");
+ }
+
+ public alter_ischema_args getEmptyArgsInstance() {
+ return new alter_ischema_args();
+ }
+
+ protected boolean isOneway() {
+ return false;
+ }
+
+ public alter_ischema_result getResult(I iface, alter_ischema_args args) throws org.apache.thrift.TException {
+ alter_ischema_result result = new alter_ischema_result();
+ try {
+ iface.alter_ischema(args.schemaName, args.newSchema);
+ } catch (NoSuchObjectException o1) {
+ result.o1 = o1;
+ } catch (MetaException o2) {
+ result.o2 = o2;
+ }
+ return result;
+ }
+ }
+
+ // Server-side dispatcher for get_ischema: stores the handler's ISchema in
+ // result.success, or captures a declared exception for return to the client.
+ @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class get_ischema<I extends Iface> extends org.apache.thrift.ProcessFunction<I, get_ischema_args> {
+ public get_ischema() {
+ super("get_ischema");
+ }
+
+ public get_ischema_args getEmptyArgsInstance() {
+ return new get_ischema_args();
+ }
+
+ protected boolean isOneway() {
+ return false;
+ }
+
+ public get_ischema_result getResult(I iface, get_ischema_args args) throws org.apache.thrift.TException {
+ get_ischema_result result = new get_ischema_result();
+ try {
+ result.success = iface.get_ischema(args.schemaName);
+ } catch (NoSuchObjectException o1) {
+ result.o1 = o1;
+ } catch (MetaException o2) {
+ result.o2 = o2;
+ }
+ return result;
+ }
+ }
+
+ // Server-side dispatcher for drop_ischema: delegates to the handler and maps
+ // each declared Thrift exception (o1..o3) into the result struct.
+ @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class drop_ischema<I extends Iface> extends org.apache.thrift.ProcessFunction<I, drop_ischema_args> {
+ public drop_ischema() {
+ super("drop_ischema");
+ }
+
+ public drop_ischema_args getEmptyArgsInstance() {
+ return new drop_ischema_args();
+ }
+
+ protected boolean isOneway() {
+ return false;
+ }
+
+ public drop_ischema_result getResult(I iface, drop_ischema_args args) throws org.apache.thrift.TException {
+ drop_ischema_result result = new drop_ischema_result();
+ try {
+ iface.drop_ischema(args.schemaName);
+ } catch (NoSuchObjectException o1) {
+ result.o1 = o1;
+ } catch (InvalidOperationException o2) {
+ result.o2 = o2;
+ } catch (MetaException o3) {
+ result.o3 = o3;
+ }
+ return result;
+ }
+ }
+
+ // Server-side dispatcher for add_schema_version: delegates to the handler and
+ // maps each declared Thrift exception (o1..o3) into the result struct.
+ @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class add_schema_version<I extends Iface> extends org.apache.thrift.ProcessFunction<I, add_schema_version_args> {
+ public add_schema_version() {
+ super("add_schema_version");
+ }
+
+ public add_schema_version_args getEmptyArgsInstance() {
+ return new add_schema_version_args();
+ }
+
+ protected boolean isOneway() {
+ return false;
+ }
+
+ public add_schema_version_result getResult(I iface, add_schema_version_args args) throws org.apache.thrift.TException {
+ add_schema_version_result result = new add_schema_version_result();
+ try {
+ iface.add_schema_version(args.schemaVersion);
+ } catch (AlreadyExistsException o1) {
+ result.o1 = o1;
+ } catch (NoSuchObjectException o2) {
+ result.o2 = o2;
+ } catch (MetaException o3) {
+ result.o3 = o3;
+ }
+ return result;
+ }
+ }
+
+ // Server-side dispatcher for get_schema_version: stores the handler's
+ // SchemaVersion in result.success, or captures a declared exception.
+ @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class get_schema_version<I extends Iface> extends org.apache.thrift.ProcessFunction<I, get_schema_version_args> {
+ public get_schema_version() {
+ super("get_schema_version");
+ }
+
+ public get_schema_version_args getEmptyArgsInstance() {
+ return new get_schema_version_args();
+ }
+
+ protected boolean isOneway() {
+ return false;
+ }
+
+ public get_schema_version_result getResult(I iface, get_schema_version_args args) throws org.apache.thrift.TException {
+ get_schema_version_result result = new get_schema_version_result();
+ try {
+ result.success = iface.get_schema_version(args.schemaName, args.version);
+ } catch (NoSuchObjectException o1) {
+ result.o1 = o1;
+ } catch (MetaException o2) {
+ result.o2 = o2;
+ }
+ return result;
+ }
+ }
+
+ // Server-side dispatcher for get_schema_latest_version: stores the handler's
+ // SchemaVersion in result.success, or captures a declared exception.
+ @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class get_schema_latest_version<I extends Iface> extends org.apache.thrift.ProcessFunction<I, get_schema_latest_version_args> {
+ public get_schema_latest_version() {
+ super("get_schema_latest_version");
+ }
+
+ public get_schema_latest_version_args getEmptyArgsInstance() {
+ return new get_schema_latest_version_args();
+ }
+
+ protected boolean isOneway() {
+ return false;
+ }
+
+ public get_schema_latest_version_result getResult(I iface, get_schema_latest_version_args args) throws org.apache.thrift.TException {
+ get_schema_latest_version_result result = new get_schema_latest_version_result();
+ try {
+ result.success = iface.get_schema_latest_version(args.schemaName);
+ } catch (NoSuchObjectException o1) {
+ result.o1 = o1;
+ } catch (MetaException o2) {
+ result.o2 = o2;
+ }
+ return result;
+ }
+ }
+
+ // Server-side dispatcher for get_schema_all_versions: stores the handler's
+ // List<SchemaVersion> in result.success, or captures a declared exception.
+ @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class get_schema_all_versions<I extends Iface> extends org.apache.thrift.ProcessFunction<I, get_schema_all_versions_args> {
+ public get_schema_all_versions() {
+ super("get_schema_all_versions");
+ }
+
+ public get_schema_all_versions_args getEmptyArgsInstance() {
+ return new get_schema_all_versions_args();
+ }
+
+ protected boolean isOneway() {
+ return false;
+ }
+
+ public get_schema_all_versions_result getResult(I iface, get_schema_all_versions_args args) throws org.apache.thrift.TException {
+ get_schema_all_versions_result result = new get_schema_all_versions_result();
+ try {
+ result.success = iface.get_schema_all_versions(args.schemaName);
+ } catch (NoSuchObjectException o1) {
+ result.o1 = o1;
+ } catch (MetaException o2) {
+ result.o2 = o2;
+ }
+ return result;
+ }
+ }
+
+ // Server-side dispatcher for drop_schema_version: delegates to the handler and
+ // maps declared Thrift exceptions into the result struct.
+ @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class drop_schema_version<I extends Iface> extends org.apache.thrift.ProcessFunction<I, drop_schema_version_args> {
+ public drop_schema_version() {
+ super("drop_schema_version");
+ }
+
+ public drop_schema_version_args getEmptyArgsInstance() {
+ return new drop_schema_version_args();
+ }
+
+ protected boolean isOneway() {
+ return false;
+ }
+
+ public drop_schema_version_result getResult(I iface, drop_schema_version_args args) throws org.apache.thrift.TException {
+ drop_schema_version_result result = new drop_schema_version_result();
+ try {
+ iface.drop_schema_version(args.schemaName, args.version);
+ } catch (NoSuchObjectException o1) {
+ result.o1 = o1;
+ } catch (MetaException o2) {
+ result.o2 = o2;
+ }
+ return result;
+ }
+ }
+
+ // Server-side dispatcher for get_schemas_by_cols: stores the handler's
+ // FindSchemasByColsResp in result.success, or captures MetaException.
+ @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class get_schemas_by_cols<I extends Iface> extends org.apache.thrift.ProcessFunction<I, get_schemas_by_cols_args> {
+ public get_schemas_by_cols() {
+ super("get_schemas_by_cols");
+ }
+
+ public get_schemas_by_cols_args getEmptyArgsInstance() {
+ return new get_schemas_by_cols_args();
+ }
+
+ protected boolean isOneway() {
+ return false;
+ }
+
+ public get_schemas_by_cols_result getResult(I iface, get_schemas_by_cols_args args) throws org.apache.thrift.TException {
+ get_schemas_by_cols_result result = new get_schemas_by_cols_result();
+ try {
+ result.success = iface.get_schemas_by_cols(args.rqst);
+ } catch (MetaException o1) {
+ result.o1 = o1;
+ }
+ return result;
+ }
+ }
+
+ // Server-side dispatcher for map_schema_version_to_serde: delegates to the
+ // handler and maps declared Thrift exceptions into the result struct.
+ @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class map_schema_version_to_serde<I extends Iface> extends org.apache.thrift.ProcessFunction<I, map_schema_version_to_serde_args> {
+ public map_schema_version_to_serde() {
+ super("map_schema_version_to_serde");
+ }
+
+ public map_schema_version_to_serde_args getEmptyArgsInstance() {
+ return new map_schema_version_to_serde_args();
+ }
+
+ protected boolean isOneway() {
+ return false;
+ }
+
+ public map_schema_version_to_serde_result getResult(I iface, map_schema_version_to_serde_args args) throws org.apache.thrift.TException {
+ map_schema_version_to_serde_result result = new map_schema_version_to_serde_result();
+ try {
+ iface.map_schema_version_to_serde(args.schemaName, args.version, args.serdeName);
+ } catch (NoSuchObjectException o1) {
+ result.o1 = o1;
+ } catch (MetaException o2) {
+ result.o2 = o2;
+ }
+ return result;
+ }
+ }
+
+ // Server-side dispatcher for set_schema_version_state: delegates to the handler
+ // and maps each declared Thrift exception (o1..o3) into the result struct.
+ @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class set_schema_version_state<I extends Iface> extends org.apache.thrift.ProcessFunction<I, set_schema_version_state_args> {
+ public set_schema_version_state() {
+ super("set_schema_version_state");
+ }
+
+ public set_schema_version_state_args getEmptyArgsInstance() {
+ return new set_schema_version_state_args();
+ }
+
+ protected boolean isOneway() {
+ return false;
+ }
+
+ public set_schema_version_state_result getResult(I iface, set_schema_version_state_args args) throws org.apache.thrift.TException {
+ set_schema_version_state_result result = new set_schema_version_state_result();
+ try {
+ iface.set_schema_version_state(args.schemaName, args.version, args.state);
+ } catch (NoSuchObjectException o1) {
+ result.o1 = o1;
+ } catch (InvalidOperationException o2) {
+ result.o2 = o2;
+ } catch (MetaException o3) {
+ result.o3 = o3;
+ }
+ return result;
+ }
+ }
+
+ // Server-side dispatcher for add_serde: delegates to the handler and maps
+ // declared Thrift exceptions into the result struct.
+ @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class add_serde<I extends Iface> extends org.apache.thrift.ProcessFunction<I, add_serde_args> {
+ public add_serde() {
+ super("add_serde");
+ }
+
+ public add_serde_args getEmptyArgsInstance() {
+ return new add_serde_args();
+ }
+
+ protected boolean isOneway() {
+ return false;
+ }
+
+ public add_serde_result getResult(I iface, add_serde_args args) throws org.apache.thrift.TException {
+ add_serde_result result = new add_serde_result();
+ try {
+ iface.add_serde(args.serde);
+ } catch (AlreadyExistsException o1) {
+ result.o1 = o1;
+ } catch (MetaException o2) {
+ result.o2 = o2;
+ }
+ return result;
+ }
+ }
+
+ // Server-side dispatcher for get_serde: stores the handler's SerDeInfo in
+ // result.success, or captures a declared exception for return to the client.
+ @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class get_serde<I extends Iface> extends org.apache.thrift.ProcessFunction<I, get_serde_args> {
+ public get_serde() {
+ super("get_serde");
+ }
+
+ public get_serde_args getEmptyArgsInstance() {
+ return new get_serde_args();
+ }
+
+ protected boolean isOneway() {
+ return false;
+ }
+
+ public get_serde_result getResult(I iface, get_serde_args args) throws org.apache.thrift.TException {
+ get_serde_result result = new get_serde_result();
+ try {
+ result.success = iface.get_serde(args.serdeName);
+ } catch (NoSuchObjectException o1) {
+ result.o1 = o1;
+ } catch (MetaException o2) {
+ result.o2 = o2;
+ }
+ return result;
+ }
+ }
+
}
@org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class AsyncProcessor<I extends AsyncIface> extends com.facebook.fb303.FacebookService.AsyncProcessor<I> {
@@ -17380,6 +18687,20 @@ import org.slf4j.LoggerFactory;
processMap.put("create_or_update_wm_mapping", new create_or_update_wm_mapping());
processMap.put("drop_wm_mapping", new drop_wm_mapping());
processMap.put("create_or_drop_wm_trigger_to_pool_mapping", new create_or_drop_wm_trigger_to_pool_mapping());
+ processMap.put("create_ischema", new create_ischema());
+ processMap.put("alter_ischema", new alter_ischema());
+ processMap.put("get_ischema", new get_ischema());
+ processMap.put("drop_ischema", new drop_ischema());
+ processMap.put("add_schema_version", new add_schema_version());
+ processMap.put("get_schema_version", new get_schema_version());
+ processMap.put("get_schema_latest_version", new get_schema_latest_version());
+ processMap.put("get_schema_all_versions", new get_schema_all_versions());
+ processMap.put("drop_schema_version", new drop_schema_version());
+ processMap.put("get_schemas_by_cols", new get_schemas_by_cols());
+ processMap.put("map_schema_version_to_serde", new map_schema_version_to_serde());
+ processMap.put("set_schema_version_state", new set_schema_version_state());
+ processMap.put("add_serde", new add_serde());
+ processMap.put("get_serde", new get_serde());
return processMap;
}
@@ -28470,6 +29791,881 @@ import org.slf4j.LoggerFactory;
}
}
+ // Thrift-generated async server stub for create_ischema (void return).
+ // Success sends an empty REPLY; declared service exceptions are returned as
+ // fields of the result struct; anything else becomes a TApplicationException.
+ @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class create_ischema<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, create_ischema_args, Void> {
+ public create_ischema() {
+ super("create_ischema");
+ }
+
+ public create_ischema_args getEmptyArgsInstance() {
+ return new create_ischema_args();
+ }
+
+ // Builds the callback the async handler completes; it serializes the
+ // response onto the frame buffer, closing the buffer if the write fails.
+ public AsyncMethodCallback<Void> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
+ final org.apache.thrift.AsyncProcessFunction fcall = this;
+ return new AsyncMethodCallback<Void>() {
+ public void onComplete(Void o) {
+ create_ischema_result result = new create_ischema_result();
+ try {
+ fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
+ return;
+ } catch (Exception e) {
+ LOGGER.error("Exception writing to internal frame buffer", e);
+ }
+ fb.close();
+ }
+ // Declared exceptions (AlreadyExists/NoSuchObject/Meta) travel back as a
+ // REPLY payload; unknown failures become a TApplicationException(INTERNAL_ERROR).
+ public void onError(Exception e) {
+ byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
+ org.apache.thrift.TBase msg;
+ create_ischema_result result = new create_ischema_result();
+ if (e instanceof AlreadyExistsException) {
+ result.o1 = (AlreadyExistsException) e;
+ result.setO1IsSet(true);
+ msg = result;
+ }
+ else if (e instanceof NoSuchObjectException) {
+ result.o2 = (NoSuchObjectException) e;
+ result.setO2IsSet(true);
+ msg = result;
+ }
+ else if (e instanceof MetaException) {
+ result.o3 = (MetaException) e;
+ result.setO3IsSet(true);
+ msg = result;
+ }
+ else
+ {
+ msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
+ msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
+ }
+ try {
+ fcall.sendResponse(fb,msg,msgType,seqid);
+ return;
+ } catch (Exception ex) {
+ LOGGER.error("Exception writing to internal frame buffer", ex);
+ }
+ fb.close();
+ }
+ };
+ }
+
+ protected boolean isOneway() {
+ return false;
+ }
+
+ // Dispatches the deserialized args to the async service handler.
+ public void start(I iface, create_ischema_args args, org.apache.thrift.async.AsyncMethodCallback<Void> resultHandler) throws TException {
+ iface.create_ischema(args.schema,resultHandler);
+ }
+ }
+
+ // Thrift-generated async server stub for alter_ischema (void return).
+ // Success sends an empty REPLY; NoSuchObjectException/MetaException come back
+ // as result-struct fields; other failures become a TApplicationException.
+ @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class alter_ischema<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, alter_ischema_args, Void> {
+ public alter_ischema() {
+ super("alter_ischema");
+ }
+
+ public alter_ischema_args getEmptyArgsInstance() {
+ return new alter_ischema_args();
+ }
+
+ // Callback that writes the wire response; the frame buffer is closed only
+ // when serialization itself fails.
+ public AsyncMethodCallback<Void> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
+ final org.apache.thrift.AsyncProcessFunction fcall = this;
+ return new AsyncMethodCallback<Void>() {
+ public void onComplete(Void o) {
+ alter_ischema_result result = new alter_ischema_result();
+ try {
+ fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
+ return;
+ } catch (Exception e) {
+ LOGGER.error("Exception writing to internal frame buffer", e);
+ }
+ fb.close();
+ }
+ public void onError(Exception e) {
+ byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
+ org.apache.thrift.TBase msg;
+ alter_ischema_result result = new alter_ischema_result();
+ if (e instanceof NoSuchObjectException) {
+ result.o1 = (NoSuchObjectException) e;
+ result.setO1IsSet(true);
+ msg = result;
+ }
+ else if (e instanceof MetaException) {
+ result.o2 = (MetaException) e;
+ result.setO2IsSet(true);
+ msg = result;
+ }
+ else
+ {
+ msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
+ msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
+ }
+ try {
+ fcall.sendResponse(fb,msg,msgType,seqid);
+ return;
+ } catch (Exception ex) {
+ LOGGER.error("Exception writing to internal frame buffer", ex);
+ }
+ fb.close();
+ }
+ };
+ }
+
+ protected boolean isOneway() {
+ return false;
+ }
+
+ // Dispatches (schemaName, newSchema) to the async service handler.
+ public void start(I iface, alter_ischema_args args, org.apache.thrift.async.AsyncMethodCallback<Void> resultHandler) throws TException {
+ iface.alter_ischema(args.schemaName, args.newSchema,resultHandler);
+ }
+ }
+
+ // Thrift-generated async server stub for get_ischema, returning an ISchema.
+ // The handler's ISchema is placed in result.success; declared exceptions map
+ // to result fields, everything else to a TApplicationException.
+ @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class get_ischema<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, get_ischema_args, ISchema> {
+ public get_ischema() {
+ super("get_ischema");
+ }
+
+ public get_ischema_args getEmptyArgsInstance() {
+ return new get_ischema_args();
+ }
+
+ // Callback that serializes either the ISchema payload or an error response.
+ public AsyncMethodCallback<ISchema> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
+ final org.apache.thrift.AsyncProcessFunction fcall = this;
+ return new AsyncMethodCallback<ISchema>() {
+ public void onComplete(ISchema o) {
+ get_ischema_result result = new get_ischema_result();
+ result.success = o;
+ try {
+ fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
+ return;
+ } catch (Exception e) {
+ LOGGER.error("Exception writing to internal frame buffer", e);
+ }
+ fb.close();
+ }
+ public void onError(Exception e) {
+ byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
+ org.apache.thrift.TBase msg;
+ get_ischema_result result = new get_ischema_result();
+ if (e instanceof NoSuchObjectException) {
+ result.o1 = (NoSuchObjectException) e;
+ result.setO1IsSet(true);
+ msg = result;
+ }
+ else if (e instanceof MetaException) {
+ result.o2 = (MetaException) e;
+ result.setO2IsSet(true);
+ msg = result;
+ }
+ else
+ {
+ msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
+ msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
+ }
+ try {
+ fcall.sendResponse(fb,msg,msgType,seqid);
+ return;
+ } catch (Exception ex) {
+ LOGGER.error("Exception writing to internal frame buffer", ex);
+ }
+ fb.close();
+ }
+ };
+ }
+
+ protected boolean isOneway() {
+ return false;
+ }
+
+ // Dispatches the schemaName lookup to the async service handler.
+ public void start(I iface, get_ischema_args args, org.apache.thrift.async.AsyncMethodCallback<ISchema> resultHandler) throws TException {
+ iface.get_ischema(args.schemaName,resultHandler);
+ }
+ }
+
+ // Thrift-generated async server stub for drop_ischema (void return).
+ // Declared exceptions (NoSuchObject/InvalidOperation/Meta) are mapped to
+ // result-struct fields; anything else becomes a TApplicationException.
+ @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class drop_ischema<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, drop_ischema_args, Void> {
+ public drop_ischema() {
+ super("drop_ischema");
+ }
+
+ public drop_ischema_args getEmptyArgsInstance() {
+ return new drop_ischema_args();
+ }
+
+ // Callback that writes the wire response and closes the frame buffer only
+ // on serialization failure.
+ public AsyncMethodCallback<Void> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
+ final org.apache.thrift.AsyncProcessFunction fcall = this;
+ return new AsyncMethodCallback<Void>() {
+ public void onComplete(Void o) {
+ drop_ischema_result result = new drop_ischema_result();
+ try {
+ fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
+ return;
+ } catch (Exception e) {
+ LOGGER.error("Exception writing to internal frame buffer", e);
+ }
+ fb.close();
+ }
+ public void onError(Exception e) {
+ byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
+ org.apache.thrift.TBase msg;
+ drop_ischema_result result = new drop_ischema_result();
+ if (e instanceof NoSuchObjectException) {
+ result.o1 = (NoSuchObjectException) e;
+ result.setO1IsSet(true);
+ msg = result;
+ }
+ else if (e instanceof InvalidOperationException) {
+ result.o2 = (InvalidOperationException) e;
+ result.setO2IsSet(true);
+ msg = result;
+ }
+ else if (e instanceof MetaException) {
+ result.o3 = (MetaException) e;
+ result.setO3IsSet(true);
+ msg = result;
+ }
+ else
+ {
+ msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
+ msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
+ }
+ try {
+ fcall.sendResponse(fb,msg,msgType,seqid);
+ return;
+ } catch (Exception ex) {
+ LOGGER.error("Exception writing to internal frame buffer", ex);
+ }
+ fb.close();
+ }
+ };
+ }
+
+ protected boolean isOneway() {
+ return false;
+ }
+
+ // Dispatches the schemaName drop request to the async service handler.
+ public void start(I iface, drop_ischema_args args, org.apache.thrift.async.AsyncMethodCallback<Void> resultHandler) throws TException {
+ iface.drop_ischema(args.schemaName,resultHandler);
+ }
+ }
+
+ // Thrift-generated async server stub for add_schema_version (void return).
+ // Declared exceptions (AlreadyExists/NoSuchObject/Meta) map to result-struct
+ // fields; anything else becomes a TApplicationException.
+ @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class add_schema_version<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, add_schema_version_args, Void> {
+ public add_schema_version() {
+ super("add_schema_version");
+ }
+
+ public add_schema_version_args getEmptyArgsInstance() {
+ return new add_schema_version_args();
+ }
+
+ // Callback that writes the wire response; the frame buffer is closed only
+ // when serialization itself fails.
+ public AsyncMethodCallback<Void> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
+ final org.apache.thrift.AsyncProcessFunction fcall = this;
+ return new AsyncMethodCallback<Void>() {
+ public void onComplete(Void o) {
+ add_schema_version_result result = new add_schema_version_result();
+ try {
+ fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
+ return;
+ } catch (Exception e) {
+ LOGGER.error("Exception writing to internal frame buffer", e);
+ }
+ fb.close();
+ }
+ public void onError(Exception e) {
+ byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
+ org.apache.thrift.TBase msg;
+ add_schema_version_result result = new add_schema_version_result();
+ if (e instanceof AlreadyExistsException) {
+ result.o1 = (AlreadyExistsException) e;
+ result.setO1IsSet(true);
+ msg = result;
+ }
+ else if (e instanceof NoSuchObjectException) {
+ result.o2 = (NoSuchObjectException) e;
+ result.setO2IsSet(true);
+ msg = result;
+ }
+ else if (e instanceof MetaException) {
+ result.o3 = (MetaException) e;
+ result.setO3IsSet(true);
+ msg = result;
+ }
+ else
+ {
+ msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
+ msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
+ }
+ try {
+ fcall.sendResponse(fb,msg,msgType,seqid);
+ return;
+ } catch (Exception ex) {
+ LOGGER.error("Exception writing to internal frame buffer", ex);
+ }
+ fb.close();
+ }
+ };
+ }
+
+ protected boolean isOneway() {
+ return false;
+ }
+
+ // Dispatches the new SchemaVersion to the async service handler.
+ public void start(I iface, add_schema_version_args args, org.apache.thrift.async.AsyncMethodCallback<Void> resultHandler) throws TException {
+ iface.add_schema_version(args.schemaVersion,resultHandler);
+ }
+ }
+
+ // Thrift-generated async server stub for get_schema_version, returning a
+ // SchemaVersion in result.success. Declared exceptions map to result fields;
+ // anything else becomes a TApplicationException.
+ @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class get_schema_version<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, get_schema_version_args, SchemaVersion> {
+ public get_schema_version() {
+ super("get_schema_version");
+ }
+
+ public get_schema_version_args getEmptyArgsInstance() {
+ return new get_schema_version_args();
+ }
+
+ // Callback that serializes either the SchemaVersion payload or an error.
+ public AsyncMethodCallback<SchemaVersion> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
+ final org.apache.thrift.AsyncProcessFunction fcall = this;
+ return new AsyncMethodCallback<SchemaVersion>() {
+ public void onComplete(SchemaVersion o) {
+ get_schema_version_result result = new get_schema_version_result();
+ result.success = o;
+ try {
+ fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
+ return;
+ } catch (Exception e) {
+ LOGGER.error("Exception writing to internal frame buffer", e);
+ }
+ fb.close();
+ }
+ public void onError(Exception e) {
+ byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
+ org.apache.thrift.TBase msg;
+ get_schema_version_result result = new get_schema_version_result();
+ if (e instanceof NoSuchObjectException) {
+ result.o1 = (NoSuchObjectException) e;
+ result.setO1IsSet(true);
+ msg = result;
+ }
+ else if (e instanceof MetaException) {
+ result.o2 = (MetaException) e;
+ result.setO2IsSet(true);
+ msg = result;
+ }
+ else
+ {
+ msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
+ msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
+ }
+ try {
+ fcall.sendResponse(fb,msg,msgType,seqid);
+ return;
+ } catch (Exception ex) {
+ LOGGER.error("Exception writing to internal frame buffer", ex);
+ }
+ fb.close();
+ }
+ };
+ }
+
+ protected boolean isOneway() {
+ return false;
+ }
+
+ // Dispatches (schemaName, version) to the async service handler.
+ public void start(I iface, get_schema_version_args args, org.apache.thrift.async.AsyncMethodCallback<SchemaVersion> resultHandler) throws TException {
+ iface.get_schema_version(args.schemaName, args.version,resultHandler);
+ }
+ }
+
+ @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class get_schema_latest_version<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, get_schema_latest_version_args, SchemaVersion> {
+ public get_schema_latest_version() {
+ super("get_schema_latest_version");
+ }
+
+ public get_schema_latest_version_args getEmptyArgsInstance() {
+ return new get_schema_latest_version_args();
+ }
+
+ public AsyncMethodCallback<SchemaVersion> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
+ final org.apache.thrift.AsyncProcessFunction fcall = this;
+ return new AsyncMethodCallback<SchemaVersion>() {
+ public void onComplete(SchemaVersion o) {
+ get_schema_latest_version_result result = new get_schema_latest_version_result();
+ result.success = o;
+ try {
+ fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
+ return;
+ } catch (Exception e) {
+ LOGGER.error("Exception writing to internal frame buffer", e);
+ }
+ fb.close();
+ }
+ public void onError(Exception e) {
+ byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
+ org.apache.thrift.TBase msg;
+ get_schema_latest_version_result result = new get_schema_latest_version_result();
+ if (e instanceof NoSuchObjectException) {
+ result.o1 = (NoSuchObjectException) e;
+ result.setO1IsSet(true);
+ msg = result;
+ }
+ else if (e instanceof MetaException) {
+ result.o2 = (MetaException) e;
+ result.setO2IsSet(true);
+ msg = result;
+ }
+ else
+ {
<TRUNCATED>