You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by ng...@apache.org on 2021/04/06 15:39:55 UTC

[hive] 11/38: HIVE-24396: Added schema changes for Oracle; made DBS.TYPE NOT NULL in all scripts; added Type support to DatabaseBuilder; added unit test for DataConnector; added unit test for REMOTE Database; fixed test failures in TestSchemaToolForMetastore

This is an automated email from the ASF dual-hosted git repository.

ngangam pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git

commit 7d91a9a297cf9409383a712dcb20bf55c0886b3d
Author: Naveen Gangam <ng...@cloudera.com>
AuthorDate: Fri Nov 20 11:34:53 2020 -0500

    HIVE-24396: Added schema changes for Oracle
                Made DBS.TYPE NOT NULL in all scripts
                Added Type support to DatabaseBuilder
                Added Unit test for DataConnector
                Added Unit test for REMOTE Database
                Fixed test failures in TestSchemaToolForMetastore
---
 .../src/main/thrift/hive_metastore.thrift          |  12 +-
 .../metastore/client/builder/DatabaseBuilder.java  |  25 +++-
 .../src/main/sql/derby/hive-schema-4.0.0.derby.sql |   2 +-
 .../sql/derby/upgrade-3.2.0-to-4.0.0.derby.sql     |   3 +-
 .../src/main/sql/mysql/hive-schema-4.0.0.mysql.sql |   2 +-
 .../sql/mysql/upgrade-3.2.0-to-4.0.0.mysql.sql     |   4 +-
 .../main/sql/oracle/hive-schema-4.0.0.oracle.sql   |  25 +++-
 .../sql/oracle/upgrade-3.2.0-to-4.0.0.oracle.sql   |  25 ++++
 .../sql/postgres/hive-schema-4.0.0.postgres.sql    |   2 +-
 .../postgres/upgrade-3.2.0-to-4.0.0.postgres.sql   |   4 +-
 .../hadoop/hive/metastore/TestHiveMetaStore.java   | 136 +++++++++++++++++++++
 .../schematool/TestSchemaToolForMetastore.java     |  18 +--
 12 files changed, 233 insertions(+), 25 deletions(-)

diff --git a/standalone-metastore/metastore-common/src/main/thrift/hive_metastore.thrift b/standalone-metastore/metastore-common/src/main/thrift/hive_metastore.thrift
index a91a140..9e43101 100644
--- a/standalone-metastore/metastore-common/src/main/thrift/hive_metastore.thrift
+++ b/standalone-metastore/metastore-common/src/main/thrift/hive_metastore.thrift
@@ -919,12 +919,12 @@ struct GetPartitionsByNamesResult {
 }
 
 struct DataConnector {
-  1: string name
-  2: string type
-  3: string url
-  4: optional string description
-  5: optional map<string,string> parameters
-  6: optional string ownerName
+  1: string name,
+  2: string type,
+  3: string url,
+  4: optional string description,
+  5: optional map<string,string> parameters,
+  6: optional string ownerName,
   7: optional PrincipalType ownerType,
   8: optional i32 createTime
 }
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/client/builder/DatabaseBuilder.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/client/builder/DatabaseBuilder.java
index 806bf0f..8cd4b85 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/client/builder/DatabaseBuilder.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/client/builder/DatabaseBuilder.java
@@ -44,6 +44,7 @@ public class DatabaseBuilder {
   private PrincipalType ownerType;
   private int createTime;
   private DatabaseType type;
+  private String connectorName, remoteDBName;
 
   public DatabaseBuilder() {
   }
@@ -109,6 +110,16 @@ public class DatabaseBuilder {
     return this;
   }
 
+  public DatabaseBuilder setConnectorName(String connectorName) {
+    this.connectorName = connectorName;
+    return this;
+  }
+
+  public DatabaseBuilder setRemoteDBName(String remoteDBName) {
+    this.remoteDBName = remoteDBName;
+    return this;
+  }
+
   public Database build(Configuration conf) throws MetaException {
     if (name == null) throw new MetaException("You must name the database");
     if (catalogName == null) catalogName = MetaStoreUtils.getDefaultCatalog(conf);
@@ -122,7 +133,19 @@ public class DatabaseBuilder {
       db.setOwnerName(ownerName);
       if (ownerType == null) ownerType = PrincipalType.USER;
       db.setOwnerType(ownerType);
-      if (type == null) type = DatabaseType.NATIVE;
+      if (type == null) {
+        type = DatabaseType.NATIVE;
+        if (connectorName != null || remoteDBName != null) {
+          throw new MetaException("connector name or remoteDBName cannot be set for database of type NATIVE");
+        }
+      } else if (type == DatabaseType.REMOTE) {
+        if (connectorName == null)
+          throw new MetaException("connector name cannot be null for database of type REMOTE");
+        db.setConnector_name(connectorName);
+        if (remoteDBName != null) {
+          db.setRemote_dbname(remoteDBName);
+        }
+      }
       db.setType(type);
       return db;
     } catch (IOException e) {
diff --git a/standalone-metastore/metastore-server/src/main/sql/derby/hive-schema-4.0.0.derby.sql b/standalone-metastore/metastore-server/src/main/sql/derby/hive-schema-4.0.0.derby.sql
index f51b712..9e9e7ea 100644
--- a/standalone-metastore/metastore-server/src/main/sql/derby/hive-schema-4.0.0.derby.sql
+++ b/standalone-metastore/metastore-server/src/main/sql/derby/hive-schema-4.0.0.derby.sql
@@ -25,7 +25,7 @@ CREATE TABLE "APP"."DBS" (
   "CTLG_NAME" VARCHAR(256) NOT NULL DEFAULT 'hive',
   "CREATE_TIME" INTEGER,
   "DB_MANAGED_LOCATION_URI" VARCHAR(4000),
-  "TYPE" VARCHAR(32) DEFAULT 'NATIVE',
+  "TYPE" VARCHAR(32) DEFAULT 'NATIVE' NOT NULL,
   "DATACONNECTOR_NAME" VARCHAR(128),
   "REMOTE_DBNAME" VARCHAR(128)
 );
diff --git a/standalone-metastore/metastore-server/src/main/sql/derby/upgrade-3.2.0-to-4.0.0.derby.sql b/standalone-metastore/metastore-server/src/main/sql/derby/upgrade-3.2.0-to-4.0.0.derby.sql
index ed37748..a31e663 100644
--- a/standalone-metastore/metastore-server/src/main/sql/derby/upgrade-3.2.0-to-4.0.0.derby.sql
+++ b/standalone-metastore/metastore-server/src/main/sql/derby/upgrade-3.2.0-to-4.0.0.derby.sql
@@ -151,8 +151,9 @@ ALTER TABLE COMPLETED_COMPACTIONS ADD CC_WORKER_VERSION varchar(128);
 CREATE TABLE "APP"."DATACONNECTORS" ("DC_NAME" VARCHAR(128) NOT NULL, "TYPE" VARCHAR(128) NOT NULL, "COMMENT" VARCHAR(256), "OWNER_NAME" VARCHAR(256), "OWNER_TYPE" VARCHAR(10), "CREATE_TIME" INTEGER);
 CREATE TABLE "APP"."DATACONNECTORS" ("DC_NAME" VARCHAR(128) NOT NULL, "TYPE" VARCHAR(128) NOT NULL, "COMMENT" VARCHAR(256), "OWNER_NAME" VARCHAR(256), "OWNER_TYPE" VARCHAR(10), "CREATE_TIME" INTEGER NOT NULL);
 CREATE TABLE "APP"."DATACONNECTOR_PARAMS" ("DC_NAME" VARCHAR(128) NOT NULL, "PARAM_KEY" VARCHAR(180) NOT NULL, "PARAM_VALUE" VARCHAR(4000), "COMMENT" VARCHAR(256));
-ALTER TABLE "APP"."DBS" ADD COLUMN "TYPE" VARCHAR(32) DEFAULT 'NATIVE';
+ALTER TABLE "APP"."DBS" ADD COLUMN "TYPE" VARCHAR(32) DEFAULT 'NATIVE' NOT NULL;
 ALTER TABLE "APP"."DBS" ADD COLUMN "DATACONNECTOR_NAME" VARCHAR(128);
+ALTER TABLE "APP"."DBS" ADD COLUMN "REMOTE_DBNAME" VARCHAR(128);
 UPDATE "APP"."DBS" SET TYPE='NATIVE' WHERE TYPE IS NULL;
 ALTER TABLE "APP"."DATACONNECTORS" ADD CONSTRAINT "DATACONNECTORS_KEY_PK" PRIMARY KEY ("DC_NAME");
 ALTER TABLE "APP"."DATACONNECTOR_PARAMS" ADD CONSTRAINT "DATACONNECTOR_PARAMS_KEY_PK" PRIMARY KEY ("DC_NAME", "PARAM_KEY");
diff --git a/standalone-metastore/metastore-server/src/main/sql/mysql/hive-schema-4.0.0.mysql.sql b/standalone-metastore/metastore-server/src/main/sql/mysql/hive-schema-4.0.0.mysql.sql
index 48e6f7d..97ed533 100644
--- a/standalone-metastore/metastore-server/src/main/sql/mysql/hive-schema-4.0.0.mysql.sql
+++ b/standalone-metastore/metastore-server/src/main/sql/mysql/hive-schema-4.0.0.mysql.sql
@@ -105,7 +105,7 @@ CREATE TABLE IF NOT EXISTS `DBS` (
   `CTLG_NAME` varchar(256) NOT NULL DEFAULT 'hive',
   `CREATE_TIME` INT(11),
   `DB_MANAGED_LOCATION_URI` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin,
-  `TYPE` VARCHAR(32) DEFAULT 'NATIVE',
+  `TYPE` VARCHAR(32) DEFAULT 'NATIVE' NOT NULL,
   `DATACONNECTOR_NAME` VARCHAR(128),
   `REMOTE_DBNAME` VARCHAR(128),
   PRIMARY KEY (`DB_ID`),
diff --git a/standalone-metastore/metastore-server/src/main/sql/mysql/upgrade-3.2.0-to-4.0.0.mysql.sql b/standalone-metastore/metastore-server/src/main/sql/mysql/upgrade-3.2.0-to-4.0.0.mysql.sql
index e86c8b0..03bef05 100644
--- a/standalone-metastore/metastore-server/src/main/sql/mysql/upgrade-3.2.0-to-4.0.0.mysql.sql
+++ b/standalone-metastore/metastore-server/src/main/sql/mysql/upgrade-3.2.0-to-4.0.0.mysql.sql
@@ -175,10 +175,10 @@ CREATE TABLE `DATACONNECTOR_PARAMS` (
   CONSTRAINT `DATACONNECTOR_NAME_FK1` FOREIGN KEY (`NAME`) REFERENCES `DATACONNECTORS` (`NAME`) ON DELETE CASCADE
 ) ENGINE=InnoDB DEFAULT CHARSET=latin1;
 
-ALTER TABLE `DBS` ADD COLUMN `TYPE` VARCHAR(32) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT 'NATIVE';
-UPDATE `DBS` SET `TYPE` = 'NATIVE' WHERE `TYPE` IS NULL;
+ALTER TABLE `DBS` ADD COLUMN `TYPE` VARCHAR(32) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT 'NATIVE' NOT NULL;
 ALTER TABLE `DBS` ADD COLUMN `DATACONNECTOR_NAME` VARCHAR(128) CHARACTER SET latin1 COLLATE latin1_bin;
 ALTER TABLE `DBS` ADD COLUMN `REMOTE_DBNAME` VARCHAR(128) CHARACTER SET latin1 COLLATE latin1_bin;
+UPDATE `DBS` SET `TYPE` = 'NATIVE' WHERE `TYPE` IS NULL;
 
 -- These lines need to be last.  Insert any changes above.
 UPDATE VERSION SET SCHEMA_VERSION='4.0.0', VERSION_COMMENT='Hive release version 4.0.0' where VER_ID=1;
diff --git a/standalone-metastore/metastore-server/src/main/sql/oracle/hive-schema-4.0.0.oracle.sql b/standalone-metastore/metastore-server/src/main/sql/oracle/hive-schema-4.0.0.oracle.sql
index 1292aee..fbe6266 100644
--- a/standalone-metastore/metastore-server/src/main/sql/oracle/hive-schema-4.0.0.oracle.sql
+++ b/standalone-metastore/metastore-server/src/main/sql/oracle/hive-schema-4.0.0.oracle.sql
@@ -102,7 +102,10 @@ CREATE TABLE DBS
     OWNER_TYPE VARCHAR2(10) NULL,
     CTLG_NAME VARCHAR2(256) DEFAULT 'hive' NOT NULL,
     CREATE_TIME NUMBER (10),
-    DB_MANAGED_LOCATION_URI VARCHAR2(4000) NULL
+    DB_MANAGED_LOCATION_URI VARCHAR2(4000) NULL,
+    TYPE VARCHAR2(32) DEFAULT 'NATIVE' NOT NULL,
+    DATACONNECTOR_NAME VARCHAR2(128) NULL,
+    REMOTE_DBNAME VARCHAR2(128) NULL
 );
 
 ALTER TABLE DBS ADD CONSTRAINT DBS_PK PRIMARY KEY (DB_ID);
@@ -1279,6 +1282,26 @@ CREATE TABLE "PACKAGES" (
 CREATE UNIQUE INDEX UNIQUEPKG ON PACKAGES ("NAME", "DB_ID");
 ALTER TABLE "PACKAGES" ADD CONSTRAINT "PACKAGES_FK1" FOREIGN KEY ("DB_ID") REFERENCES "DBS" ("DB_ID");
 
+-- HIVE-24396
+-- Create DataConnectors and DataConnector_Params tables
+CREATE TABLE DATACONNECTORS (
+  NAME VARCHAR2(128) NOT NULL,
+  TYPE VARCHAR2(32) NOT NULL,
+  URL VARCHAR2(4000) NOT NULL,
+  "COMMENT" VARCHAR2(256),
+  OWNER_NAME VARCHAR2(256),
+  OWNER_TYPE VARCHAR2(10),
+  CREATE_TIME NUMBER(10) NOT NULL,
+  PRIMARY KEY (NAME)
+);
+
+CREATE TABLE DATACONNECTOR_PARAMS (
+  NAME VARCHAR2(128) NOT NULL,
+  PARAM_KEY VARCHAR2(180) NOT NULL,
+  PARAM_VALUE VARCHAR2(4000),
+  PRIMARY KEY (NAME, PARAM_KEY),
+  CONSTRAINT DATACONNECTOR_NAME_FK1 FOREIGN KEY (NAME) REFERENCES DATACONNECTORS (NAME) ON DELETE CASCADE
+);
 
 -- -----------------------------------------------------------------
 -- Record schema version. Should be the last step in the init script
diff --git a/standalone-metastore/metastore-server/src/main/sql/oracle/upgrade-3.2.0-to-4.0.0.oracle.sql b/standalone-metastore/metastore-server/src/main/sql/oracle/upgrade-3.2.0-to-4.0.0.oracle.sql
index eb1faa0..e0354f1 100644
--- a/standalone-metastore/metastore-server/src/main/sql/oracle/upgrade-3.2.0-to-4.0.0.oracle.sql
+++ b/standalone-metastore/metastore-server/src/main/sql/oracle/upgrade-3.2.0-to-4.0.0.oracle.sql
@@ -146,6 +146,30 @@ CREATE INDEX CTLG_NAME_DBS ON DBS(CTLG_NAME);
 
 -- HIVE-24770
 UPDATE SERDES SET SLIB='org.apache.hadoop.hive.serde2.MultiDelimitSerDe' where SLIB='org.apache.hadoop.hive.contrib.serde2.MultiDelimitSerDe';
+-- HIVE-24396
+-- Create DataConnectors and DataConnector_Params tables
+CREATE TABLE DATACONNECTORS (
+  NAME VARCHAR2(128) NOT NULL,
+  TYPE VARCHAR2(32) NOT NULL,
+  URL VARCHAR2(4000) NOT NULL,
+  "COMMENT" VARCHAR2(256),
+  OWNER_NAME VARCHAR2(256),
+  OWNER_TYPE VARCHAR2(10),
+  CREATE_TIME NUMBER(10) NOT NULL,
+  PRIMARY KEY (NAME)
+);
+
+CREATE TABLE DATACONNECTOR_PARAMS (
+  NAME VARCHAR2(128) NOT NULL,
+  PARAM_KEY VARCHAR2(180) NOT NULL,
+  PARAM_VALUE VARCHAR2(4000),
+  PRIMARY KEY (NAME, PARAM_KEY),
+  CONSTRAINT DATACONNECTOR_NAME_FK1 FOREIGN KEY (NAME) REFERENCES DATACONNECTORS (NAME) ON DELETE CASCADE
+);
+ALTER TABLE DBS ADD TYPE VARCHAR2(32) DEFAULT 'NATIVE' NOT NULL;
+ALTER TABLE DBS ADD DATACONNECTOR_NAME VARCHAR2(128) NULL;
+ALTER TABLE DBS ADD REMOTE_DBNAME VARCHAR2(128) NULL;
+UPDATE DBS SET TYPE='NATIVE' WHERE TYPE IS NULL;
 
 -- HIVE-24880
 ALTER TABLE COMPACTION_QUEUE ADD CQ_INITIATOR_ID varchar(128);
diff --git a/standalone-metastore/metastore-server/src/main/sql/postgres/hive-schema-4.0.0.postgres.sql b/standalone-metastore/metastore-server/src/main/sql/postgres/hive-schema-4.0.0.postgres.sql
index 998b66e..b86628b 100644
--- a/standalone-metastore/metastore-server/src/main/sql/postgres/hive-schema-4.0.0.postgres.sql
+++ b/standalone-metastore/metastore-server/src/main/sql/postgres/hive-schema-4.0.0.postgres.sql
@@ -84,7 +84,7 @@ CREATE TABLE "DBS" (
     "CTLG_NAME" varchar(256) DEFAULT 'hive' NOT NULL,
     "CREATE_TIME" bigint,
     "DB_MANAGED_LOCATION_URI" character varying(4000),
-    "TYPE" character varying(32) DEFAULT 'NATIVE',
+    "TYPE" character varying(32) DEFAULT 'NATIVE' NOT NULL,
     "DATACONNECTOR_NAME" character varying(128),
     "REMOTE_DBNAME" character varying(128)
 );
diff --git a/standalone-metastore/metastore-server/src/main/sql/postgres/upgrade-3.2.0-to-4.0.0.postgres.sql b/standalone-metastore/metastore-server/src/main/sql/postgres/upgrade-3.2.0-to-4.0.0.postgres.sql
index afbdacf..6f6d5a7 100644
--- a/standalone-metastore/metastore-server/src/main/sql/postgres/upgrade-3.2.0-to-4.0.0.postgres.sql
+++ b/standalone-metastore/metastore-server/src/main/sql/postgres/upgrade-3.2.0-to-4.0.0.postgres.sql
@@ -298,10 +298,10 @@ CREATE TABLE "DATACONNECTOR_PARAMS" (
   PRIMARY KEY ("NAME", "PARAM_KEY"),
   CONSTRAINT "DATACONNECTOR_NAME_FK1" FOREIGN KEY ("NAME") REFERENCES "DATACONNECTORS"("NAME") ON DELETE CASCADE
 );
-ALTER TABLE "DBS" ADD "TYPE" character varying(32);
-UPDATE "DBS" SET "TYPE"= 'NATIVE' WHERE "TYPE" IS NULL;
+ALTER TABLE "DBS" ADD "TYPE" character varying(32) DEFAULT 'NATIVE' NOT NULL;
 ALTER TABLE "DBS" ADD "DATACONNECTOR_NAME" character varying(128);
 ALTER TABLE "DBS" ADD "REMOTE_DBNAME" character varying(128);
+UPDATE "DBS" SET "TYPE"= 'NATIVE' WHERE "TYPE" IS NULL;
 
 -- These lines need to be last. Insert any changes above.
 UPDATE "VERSION" SET "SCHEMA_VERSION"='4.0.0', "VERSION_COMMENT"='Hive release version 4.0.0' where "VER_ID"=1;
diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
index 75b0fcf..0c740ab 100644
--- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
+++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
@@ -44,6 +44,8 @@ import java.lang.reflect.*;
 import static org.mockito.Mockito.mock;
 
 import com.google.common.collect.Sets;
+import org.apache.hadoop.hive.metastore.api.DataConnector;
+import org.apache.hadoop.hive.metastore.api.DatabaseType;
 import org.apache.hadoop.hive.metastore.api.GetPartitionsFilterSpec;
 import org.apache.hadoop.hive.metastore.api.GetProjectionsSpec;
 import org.apache.hadoop.hive.metastore.api.GetPartitionsRequest;
@@ -55,6 +57,7 @@ import org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder;
 import org.apache.hadoop.hive.metastore.client.builder.TableBuilder;
 import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
 import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars;
+import org.apache.hadoop.hive.metastore.dataconnector.jdbc.AbstractJDBCConnectorProvider;
 import org.apache.hadoop.hive.metastore.utils.FileUtils;
 import org.apache.hadoop.hive.metastore.utils.MetaStoreServerUtils;
 import org.apache.hadoop.hive.metastore.utils.MetastoreVersionInfo;
@@ -105,6 +108,7 @@ import com.google.common.collect.Lists;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotEquals;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
@@ -3488,4 +3492,136 @@ public abstract class TestHiveMetaStore {
 
     silentDropDatabase(dbName);
   }
+
+  @Test
+  public void testDataConnector() throws Throwable {
+    final String connector_name1 = "test_connector1";
+    final String connector_name2 = "test_connector2";
+    final String mysql_type = "mysql";
+    final String mysql_url = "jdbc:mysql://nightly1.apache.org:3306/hive1";
+    final String postgres_type = "postgres";
+    final String postgres_url = "jdbc:postgresql://localhost:5432";
+
+    try {
+      DataConnector connector = new DataConnector(connector_name1, mysql_type, mysql_url);
+      Map<String, String> params = new HashMap<>();
+      params.put(AbstractJDBCConnectorProvider.JDBC_USERNAME, "hive");
+      params.put(AbstractJDBCConnectorProvider.JDBC_PASSWORD, "hive");
+      connector.setParameters(params);
+      client.createDataConnector(connector);
+
+      DataConnector dConn = client.getDataConnector(connector_name1);
+      assertNotNull(dConn);
+      assertEquals("name of returned data connector is different from that of inserted connector", connector_name1,
+          dConn.getName());
+      assertEquals("type of data connector returned is different from the type inserted", mysql_type, dConn.getType());
+      assertEquals("url of the data connector returned is different from the url inserted", mysql_url, dConn.getUrl());
+      assertEquals(SecurityUtils.getUser(), dConn.getOwnerName());
+      assertEquals(PrincipalType.USER, dConn.getOwnerType());
+      assertNotEquals("Size of data connector parameters not as expected", 0, dConn.getParametersSize());
+
+      try {
+        client.createDataConnector(connector);
+        fail("Creating duplicate connector should fail");
+      } catch (Exception e) { /* as expected */ }
+
+      connector = new DataConnector(connector_name2, postgres_type, postgres_url);
+      params = new HashMap<>();
+      params.put(AbstractJDBCConnectorProvider.JDBC_USERNAME, "hive");
+      params.put(AbstractJDBCConnectorProvider.JDBC_PASSWORD, "hive");
+      connector.setParameters(params);
+      client.createDataConnector(connector);
+
+      dConn = client.getDataConnector(connector_name2);
+      assertEquals("name of returned data connector is different from that of inserted connector", connector_name2,
+          dConn.getName());
+      assertEquals("type of data connector returned is different from the type inserted", postgres_type, dConn.getType());
+      assertEquals("url of the data connector returned is different from the url inserted", postgres_url, dConn.getUrl());
+
+      List<String> connectors = client.getAllDataConnectors();
+      assertEquals("Number of dataconnectors returned is not as expected", 2, connectors.size());
+
+      DataConnector connector1 = new DataConnector(connector);
+      connector1.setUrl(mysql_url);
+      client.alterDataConnector(connector.getName(), connector1);
+
+      dConn = client.getDataConnector(connector.getName());
+      assertEquals("url of the data connector returned is different from the url inserted", mysql_url, dConn.getUrl());
+
+      // alter data connector parameters
+      params.put(AbstractJDBCConnectorProvider.JDBC_NUM_PARTITIONS, "5");
+      connector1.setParameters(params);
+      client.alterDataConnector(connector.getName(), connector1);
+
+      dConn = client.getDataConnector(connector.getName());
+      assertEquals("Size of data connector parameters not as expected", 3, dConn.getParametersSize());
+
+      // alter data connector parameters
+      connector1.setOwnerName("hiveadmin");
+      connector1.setOwnerType(PrincipalType.ROLE);
+      client.alterDataConnector(connector.getName(), connector1);
+
+      dConn = client.getDataConnector(connector.getName());
+      assertEquals("Data connector owner name not as expected", "hiveadmin", dConn.getOwnerName());
+      assertEquals("Data connector owner type not as expected", PrincipalType.ROLE, dConn.getOwnerType());
+
+      client.dropDataConnector(connector_name1, false, false);
+      connectors = client.getAllDataConnectors();
+      assertEquals("Number of dataconnectors returned is not as expected", 1, connectors.size());
+
+      client.dropDataConnector(connector_name2, false, false);
+      connectors = client.getAllDataConnectors();
+      assertEquals("Number of dataconnectors returned is not as expected", 0, connectors.size());
+    } catch (Throwable e) {
+      System.err.println(StringUtils.stringifyException(e));
+      System.err.println("testDataConnector() failed.");
+      throw e;
+    }
+  }
+
+  @Test
+  public void testRemoteDatabase() throws Throwable {
+    final String connector_name1 = "test_connector1";
+    final String mysql_type = "mysql";
+    final String mysql_url = "jdbc:mysql://nightly1.apache.org:3306/hive1";
+    final String db_name = "mysql_remote";
+    final String db2 = "mysql_dup";
+
+    try {
+      DataConnector connector = new DataConnector(connector_name1, mysql_type, mysql_url);
+      Map<String, String> params = new HashMap<>();
+      params.put(AbstractJDBCConnectorProvider.JDBC_USERNAME, "hive");
+      params.put(AbstractJDBCConnectorProvider.JDBC_PASSWORD, "hive");
+      connector.setParameters(params);
+      client.createDataConnector(connector);
+
+      DataConnector dConn = client.getDataConnector(connector_name1);
+      new DatabaseBuilder().setName(db_name).setType(DatabaseType.REMOTE).setConnectorName(connector_name1)
+          .setRemoteDBName(db_name).create(client, conf);
+
+      Database db = client.getDatabase(db_name);
+      assertNotNull(db);
+      assertEquals(db.getType(), DatabaseType.REMOTE);
+      assertEquals(db.getConnector_name(), connector_name1);
+      assertEquals(db.getRemote_dbname(), db_name);
+
+      // new db in hive pointing to same remote db.
+      new DatabaseBuilder().setName(db2).setType(DatabaseType.REMOTE).setConnectorName(connector_name1)
+          .setRemoteDBName(db_name).create(client, conf);
+
+      db = client.getDatabase(db2);
+      assertNotNull(db);
+      assertEquals(db.getType(), DatabaseType.REMOTE);
+      assertEquals(db.getConnector_name(), connector_name1);
+      assertEquals(db.getRemote_dbname(), db_name);
+
+      client.dropDataConnector(connector_name1, false, false);
+      client.dropDatabase(db_name);
+      client.dropDatabase(db2);
+    } catch (Throwable e) {
+      System.err.println(StringUtils.stringifyException(e));
+      System.err.println("testRemoteDatabase() failed.");
+      throw e;
+    }
+  }
 }
diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/tools/schematool/TestSchemaToolForMetastore.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/tools/schematool/TestSchemaToolForMetastore.java
index 709c90c..f6a029a 100644
--- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/tools/schematool/TestSchemaToolForMetastore.java
+++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/tools/schematool/TestSchemaToolForMetastore.java
@@ -111,7 +111,7 @@ public class TestSchemaToolForMetastore {
     String[] scripts = new String[] {
         "insert into CTLGS values(99, 'test_cat_1', 'description', 'hdfs://myhost.com:8020/user/hive/warehouse/mydb', " + time + ");",
         "insert into SEQUENCE_TABLE values('org.apache.hadoop.hive.metastore.model.MDatabase', 100);",
-        "insert into DBS values(99, 'test db1', 'hdfs:///tmp/ext', 'db1', 'test', 'test', 'test_cat_1', " + time + ", 'hdfs:///tmp/mgd');"
+        "insert into DBS values(99, 'test db1', 'hdfs:///tmp/ext', 'db1', 'test', 'test', 'test_cat_1', " + time + ", 'hdfs:///tmp/mgd', 'NATIVE', '', '');"
     };
     File scriptFile = generateTestScript(scripts);
     schemaTool.execSql(scriptFile.getPath());
@@ -123,7 +123,7 @@ public class TestSchemaToolForMetastore {
         "delete from SEQUENCE_TABLE;",
         "delete from DBS;",
         "insert into SEQUENCE_TABLE values('org.apache.hadoop.hive.metastore.model.MDatabase', 100);",
-        "insert into DBS values(102, 'test db1', 'hdfs:///tmp/ext', 'db1', 'test', 'test', 'test_cat_1', " + time + ", 'hdfs:///tmp/mgd');"
+        "insert into DBS values(102, 'test db1', 'hdfs:///tmp/ext', 'db1', 'test', 'test', 'test_cat_1', " + time + ", 'hdfs:///tmp/mgd', 'NATIVE', '', '');"
     };
     scriptFile = generateTestScript(scripts);
     schemaTool.execSql(scriptFile.getPath());
@@ -388,8 +388,8 @@ public class TestSchemaToolForMetastore {
     // Test valid case
     String[] scripts = new String[] {
         "insert into CTLGS values(3, 'test_cat_2', 'description', 'hdfs://myhost.com:8020/user/hive/warehouse/mydb', " + time + ");",
-        "insert into DBS values(2, 'my db', 'hdfs://myhost.com:8020/user/hive/warehouse/mydb', 'mydb', 'public', 'role', 'test_cat_2', " + time + ", '');",
-        "insert into DBS values(7, 'db with bad port', 'hdfs://myhost.com:8020/', 'haDB', 'public', 'role', 'test_cat_2', " + time + ", '');",
+        "insert into DBS values(2, 'my db', 'hdfs://myhost.com:8020/user/hive/warehouse/mydb', 'mydb', 'public', 'role', 'test_cat_2', " + time + ", '', 'NATIVE', '', '');",
+        "insert into DBS values(7, 'db with bad port', 'hdfs://myhost.com:8020/', 'haDB', 'public', 'role', 'test_cat_2', " + time + ", '', 'NATIVE', '', '');",
         "insert into SDS(SD_ID,CD_ID,INPUT_FORMAT,IS_COMPRESSED,IS_STOREDASSUBDIRECTORIES,LOCATION,NUM_BUCKETS,OUTPUT_FORMAT,SERDE_ID) values (1,null,'org.apache.hadoop.mapred.TextInputFormat','N','N','hdfs://myhost.com:8020/user/hive/warehouse/mydb',-1,'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',null);",
         "insert into SDS(SD_ID,CD_ID,INPUT_FORMAT,IS_COMPRESSED,IS_STOREDASSUBDIRECTORIES,LOCATION,NUM_BUCKETS,OUTPUT_FORMAT,SERDE_ID) values (2,null,'org.apache.hadoop.mapred.TextInputFormat','N','N','hdfs://myhost.com:8020/user/admin/2015_11_18',-1,'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',null);",
         "insert into SDS(SD_ID,CD_ID,INPUT_FORMAT,IS_COMPRESSED,IS_STOREDASSUBDIRECTORIES,LOCATION,NUM_BUCKETS,OUTPUT_FORMAT,SERDE_ID) values (3,null,'org.apache.hadoop.mapred.TextInputFormat','N','N',null,-1,'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',null);",
@@ -416,10 +416,10 @@ public class TestSchemaToolForMetastore {
         "delete from TBLS;",
         "delete from SDS;",
         "delete from DBS;",
-        "insert into DBS values(2, 'my db', '/user/hive/warehouse/mydb', 'mydb', 'public', 'role', 'test_cat_2', " + time + ", '');",
-        "insert into DBS values(4, 'my db2', 'hdfs://myhost.com:8020', '', 'public', 'role', 'test_cat_2', " + time + ", '');",
-        "insert into DBS values(6, 'db with bad port', 'hdfs://myhost.com:8020:', 'zDB', 'public', 'role', 'test_cat_2', " + time + ", '');",
-        "insert into DBS values(7, 'db with bad port', 'hdfs://mynameservice.com/', 'haDB', 'public', 'role', 'test_cat_2', " + time + ", '');",
+        "insert into DBS values(2, 'my db', '/user/hive/warehouse/mydb', 'mydb', 'public', 'role', 'test_cat_2', " + time + ", '', 'NATIVE', '', '');",
+        "insert into DBS values(4, 'my db2', 'hdfs://myhost.com:8020', '', 'public', 'role', 'test_cat_2', " + time + ", '', 'NATIVE', '', '');",
+        "insert into DBS values(6, 'db with bad port', 'hdfs://myhost.com:8020:', 'zDB', 'public', 'role', 'test_cat_2', " + time + ", '', 'NATIVE', '', '');",
+        "insert into DBS values(7, 'db with bad port', 'hdfs://mynameservice.com/', 'haDB', 'public', 'role', 'test_cat_2', " + time + ", '', 'NATIVE', '', '');",
         "insert into SDS(SD_ID,CD_ID,INPUT_FORMAT,IS_COMPRESSED,IS_STOREDASSUBDIRECTORIES,LOCATION,NUM_BUCKETS,OUTPUT_FORMAT,SERDE_ID) values (1,null,'org.apache.hadoop.mapred.TextInputFormat','N','N','hdfs://yourhost.com:8020/user/hive/warehouse/mydb',-1,'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',null);",
         "insert into SDS(SD_ID,CD_ID,INPUT_FORMAT,IS_COMPRESSED,IS_STOREDASSUBDIRECTORIES,LOCATION,NUM_BUCKETS,OUTPUT_FORMAT,SERDE_ID) values (2,null,'org.apache.hadoop.mapred.TextInputFormat','N','N','file:///user/admin/2015_11_18',-1,'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',null);",
         "insert into TBLS(TBL_ID,CREATE_TIME,DB_ID,LAST_ACCESS_TIME,OWNER,RETENTION,SD_ID,TBL_NAME,TBL_TYPE,VIEW_EXPANDED_TEXT,VIEW_ORIGINAL_TEXT,IS_REWRITE_ENABLED) values (2 ,1435255431,2,0 ,'hive',0,1,'mytal','MANAGED_TABLE',NULL,NULL,'n');",
@@ -519,7 +519,7 @@ public class TestSchemaToolForMetastore {
     String time = String.valueOf(System.currentTimeMillis()/1000);
      String[] scripts = new String[] {
           "insert into CTLGS values (2, 'mycat', 'my description', 'hdfs://myhost.com:8020/user/hive/warehouse', " + time + ");",
-          "insert into DBS values(2, 'my db', 'hdfs://myhost.com:8020/user/hive/warehouse/mydb', 'mydb', 'public', 'role', 'mycat', " + time + ", '');",
+          "insert into DBS values(2, 'my db', 'hdfs://myhost.com:8020/user/hive/warehouse/mydb', 'mydb', 'public', 'role', 'mycat', " + time + ", '', 'NATIVE', '', '');",
           "insert into SDS(SD_ID,CD_ID,INPUT_FORMAT,IS_COMPRESSED,IS_STOREDASSUBDIRECTORIES,LOCATION,NUM_BUCKETS,OUTPUT_FORMAT,SERDE_ID) values (1,null,'org.apache.hadoop.mapred.TextInputFormat','N','N','hdfs://myhost.com:8020/user/hive/warehouse/mydb',-1,'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',null);",
           "insert into SDS(SD_ID,CD_ID,INPUT_FORMAT,IS_COMPRESSED,IS_STOREDASSUBDIRECTORIES,LOCATION,NUM_BUCKETS,OUTPUT_FORMAT,SERDE_ID) values (2,null,'org.apache.hadoop.mapred.TextInputFormat','N','N','hdfs://myhost.com:8020/user/admin/2015_11_18',-1,'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat',null);",
           "insert into TBLS(TBL_ID,CREATE_TIME,DB_ID,LAST_ACCESS_TIME,OWNER,RETENTION,SD_ID,TBL_NAME,TBL_TYPE,VIEW_EXPANDED_TEXT,VIEW_ORIGINAL_TEXT,IS_REWRITE_ENABLED) values (2 ,1435255431,2,0 ,'hive',0,1,'mytal','MANAGED_TABLE',NULL,NULL,'n');",