Posted to commits@hive.apache.org by ng...@apache.org on 2021/04/06 15:39:48 UTC

[hive] 04/38: Added provider for postgres, refactored a bunch of classes

This is an automated email from the ASF dual-hosted git repository.

ngangam pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git

commit 91f1ccd9d0be977fd3222a53b3e6758f83d88132
Author: Naveen Gangam <ng...@cloudera.com>
AuthorDate: Mon Nov 16 13:24:52 2020 -0500

    Added provider for postgres, refactored a bunch of classes
---
 .../hadoop/hive/metastore/conf/MetastoreConf.java  |  10 ++
 .../AbstractDataConnectorProvider.java             |  45 +++++
 .../DataConnectorProviderFactory.java              |  13 +-
 .../dataconnector/IDataConnectorProvider.java      |   2 -
 .../JDBCConnectorProviderFactory.java              |  42 +++++
 ...der.java => AbstractJDBCConnectorProvider.java} | 192 ++++++++++++++-------
 .../dataconnector/jdbc/MySQLConfigGenerator.java   |   4 -
 ...orProvider.java => MySQLConnectorProvider.java} | 137 +++------------
 .../jdbc/PostgreSQLConnectorProvider.java          | 116 +++++++++++++
 9 files changed, 373 insertions(+), 188 deletions(-)

diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java
index 2e03223..3489ccc 100644
--- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java
+++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java
@@ -2257,4 +2257,14 @@ public class MetastoreConf {
     buf.append("Finished MetastoreConf object.\n");
     return buf.toString();
   }
+
+  public static char[] getValueFromKeystore(String keystorePath, String key) throws IOException {
+    char[] valueCharArray = null;
+    if (keystorePath != null && key != null) {
+      Configuration conf = new Configuration();
+      conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, keystorePath);
+      valueCharArray = conf.getPassword(key);
+    }
+    return valueCharArray;
+  }
 }
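
A note on the new helper: getValueFromKeystore() resolves a secret through the
Hadoop credential provider API, so JDBC passwords can stay out of plain-text
config. A minimal sketch of how it might be exercised, assuming a JCEKS
keystore created beforehand with the Hadoop CLI (the path and alias are
placeholders):

    // created beforehand with:
    //   hadoop credential create mydb.password -provider jceks://file/tmp/creds.jceks
    import java.io.IOException;
    import org.apache.hadoop.hive.metastore.conf.MetastoreConf;

    public class KeystoreLookupExample {
      public static void main(String[] args) throws IOException {
        // placeholder keystore URI and alias; any Hadoop credential provider URI works
        char[] value = MetastoreConf.getValueFromKeystore(
            "jceks://file/tmp/creds.jceks", "mydb.password");
        System.out.println(value == null ? "no such key" : "resolved " + value.length + " chars");
      }
    }
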
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/AbstractDataConnectorProvider.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/AbstractDataConnectorProvider.java
index 2a73236..74ddbdd 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/AbstractDataConnectorProvider.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/AbstractDataConnectorProvider.java
@@ -1,11 +1,19 @@
 package org.apache.hadoop.hive.metastore.dataconnector;
 
+import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.metastore.api.DataConnector;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.Order;
+import org.apache.hadoop.hive.metastore.api.SerDeInfo;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.metastore.api.Table;
 
 import java.net.ConnectException;
+import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 
 public abstract class AbstractDataConnectorProvider implements IDataConnectorProvider {
   protected String scoped_db = null;
@@ -68,4 +76,41 @@ public abstract class AbstractDataConnectorProvider implements IDataConnectorPro
   @Override public Table getTable(String tableName) throws MetaException {
     return null;
   }
+
+  protected Table buildTableFromColsList(String tableName, List<FieldSchema> cols) {
+    //Setting the storage descriptor.
+    StorageDescriptor sd = new StorageDescriptor();
+    sd.setCols(cols);
+    SerDeInfo serdeInfo = new SerDeInfo();
+    serdeInfo.setName(tableName);
+    serdeInfo.setSerializationLib("org.apache.hive.storage.jdbc.JdbcSerDe");
+    Map<String, String> serdeParams = new HashMap<String, String>();
+    serdeParams.put("serialization.format", "1");
+    serdeInfo.setParameters(serdeParams);
+
+    sd.setSerdeInfo(serdeInfo);
+    sd.setInputFormat("org.apache.hive.storage.jdbc.JdbcInputFormat"); // TODO
+    sd.setOutputFormat("org.apache.hive.storage.jdbc.JdbcOutputFormat"); // TODO
+    sd.setLocation("/tmp/some_dummy_path"); // TODO
+    sd.setBucketCols(new ArrayList<String>());
+    sd.setSortCols(new ArrayList<Order>());
+
+    //Setting the required table information
+    Table table = new Table();
+    table.setTableName(tableName);
+    table.setTableType(TableType.EXTERNAL_TABLE.toString());
+    table.setDbName(scoped_db);
+    table.setSd(sd);
+    // set table properties that subclasses can fill in
+    table.setParameters(new HashMap<String, String>());
+    // set partition keys to empty
+    table.setPartitionKeys(new ArrayList<FieldSchema>());
+
+    return table;
+  }
+
+  protected abstract String getInputClass();
+
+  protected abstract String getOutputClass();
 }
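
For orientation, a connector subclass feeds buildTableFromColsList() a column
list and then decorates the returned Table. A rough sketch as it might appear
inside a subclass method, with imports as in the class above (the table and
column names are invented):

    List<FieldSchema> cols = new ArrayList<>();
    cols.add(new FieldSchema("id", "int", "inferred column type"));
    cols.add(new FieldSchema("name", "varchar(255)", "inferred column type"));
    Table table = buildTableFromColsList("customers", cols);
    // the parameters map starts empty so callers can layer on storage-handler properties
    table.getParameters().put("EXTERNAL", "TRUE");
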
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/DataConnectorProviderFactory.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/DataConnectorProviderFactory.java
index ce43633..7f321a8 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/DataConnectorProviderFactory.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/DataConnectorProviderFactory.java
@@ -6,18 +6,14 @@ import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.DatabaseType;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
-import static org.apache.hadoop.hive.metastore.dataconnector.IDataConnectorProvider.DERBY_TYPE;
-import static org.apache.hadoop.hive.metastore.dataconnector.IDataConnectorProvider.MSSQL_TYPE;
-import static org.apache.hadoop.hive.metastore.dataconnector.IDataConnectorProvider.MYSQL_TYPE;
-import static org.apache.hadoop.hive.metastore.dataconnector.IDataConnectorProvider.ORACLE_TYPE;
-import static org.apache.hadoop.hive.metastore.dataconnector.IDataConnectorProvider.POSTGRES_TYPE;
-import org.apache.hadoop.hive.metastore.dataconnector.jdbc.JDBCConnectorProvider;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.util.HashMap;
 import java.util.Map;
 
+import static org.apache.hadoop.hive.metastore.dataconnector.IDataConnectorProvider.*;
+
 public class DataConnectorProviderFactory {
   Logger LOG = LoggerFactory.getLogger(DataConnectorProviderFactory.class);
 
@@ -45,7 +41,7 @@ public class DataConnectorProviderFactory {
     }
 
     String scopedDb = (db.getRemote_dbname() != null) ? db.getRemote_dbname() : db.getName();
-    if (cache.containsKey(db.getConnector_name().toLowerCase() != null)) {
+    if (cache.containsKey(db.getConnector_name().toLowerCase())) {
       provider = cache.get(db.getConnector_name().toLowerCase());
       if (provider != null) {
         provider.setScope(scopedDb);
@@ -67,7 +63,8 @@ public class DataConnectorProviderFactory {
     case ORACLE_TYPE:
     case POSTGRES_TYPE:
       try {
-        provider = new JDBCConnectorProvider(scopedDb, connector);
+        provider = JDBCConnectorProviderFactory.get(scopedDb, connector);
+        cache.put(db.getConnector_name().toLowerCase(), provider);
       } catch (Exception e) {
         throw new MetaException("Could not instantiate a provider for database " + db.getName());
       }
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/IDataConnectorProvider.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/IDataConnectorProvider.java
index 73b3db9..24a1d7e 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/IDataConnectorProvider.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/IDataConnectorProvider.java
@@ -2,14 +2,12 @@ package org.apache.hadoop.hive.metastore.dataconnector;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.hive.metastore.HiveMetaException;
 import org.apache.hadoop.hive.metastore.api.DataConnector;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.Table;
 
 import java.net.ConnectException;
 import java.util.List;
-import java.util.Map;
 
 /**
  * This interface provides a way for us to plugin different datasources into hive.
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/JDBCConnectorProviderFactory.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/JDBCConnectorProviderFactory.java
new file mode 100644
index 0000000..537fd2c
--- /dev/null
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/JDBCConnectorProviderFactory.java
@@ -0,0 +1,42 @@
+package org.apache.hadoop.hive.metastore.dataconnector;
+
+import org.apache.hadoop.hive.metastore.api.DataConnector;
+import org.apache.hadoop.hive.metastore.dataconnector.jdbc.MySQLConnectorProvider;
+import org.apache.hadoop.hive.metastore.dataconnector.jdbc.PostgreSQLConnectorProvider;
+
+import static org.apache.hadoop.hive.metastore.dataconnector.IDataConnectorProvider.MYSQL_TYPE;
+import static org.apache.hadoop.hive.metastore.dataconnector.IDataConnectorProvider.POSTGRES_TYPE;
+
+
+public class JDBCConnectorProviderFactory {
+
+  public static IDataConnectorProvider get(String dbName, DataConnector connector) {
+    IDataConnectorProvider provider = null;
+    switch(connector.getType().toLowerCase()) {
+    case MYSQL_TYPE:
+      provider = new MySQLConnectorProvider(dbName, connector);
+      break;
+    case POSTGRES_TYPE:
+      provider = new PostgreSQLConnectorProvider(dbName, connector);
+      break;
+
+    default:
+      throw new RuntimeException("Unsupported JDBC type");
+    }
+
+    return provider;
+  }
+}
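
A minimal sketch of obtaining a provider through the new factory (connector
values are placeholders, and this assumes MYSQL_TYPE resolves to "mysql"):

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.hadoop.hive.metastore.api.DataConnector;
    import org.apache.hadoop.hive.metastore.dataconnector.IDataConnectorProvider;
    import org.apache.hadoop.hive.metastore.dataconnector.JDBCConnectorProviderFactory;

    public class ProviderFactoryExample {
      public static void main(String[] args) throws Exception {
        DataConnector connector = new DataConnector();
        connector.setName("mysql_local");                        // placeholder
        connector.setType("mysql");
        connector.setUrl("jdbc:mysql://localhost:3306/testdb");  // placeholder
        Map<String, String> params = new HashMap<>();
        params.put("hive.sql.dbcp.username", "hiveuser");        // placeholder credentials
        params.put("hive.sql.dbcp.password", "hivepass");
        connector.setParameters(params);                         // read by the provider constructor
        IDataConnectorProvider provider = JDBCConnectorProviderFactory.get("testdb", connector);
        provider.open();   // throws ConnectException if the driver is missing or the source is unreachable
        provider.close();
      }
    }
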
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/JDBCConnectorProvider.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/AbstractJDBCConnectorProvider.java
similarity index 54%
copy from standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/JDBCConnectorProvider.java
copy to standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/AbstractJDBCConnectorProvider.java
index 7e9d408..d436123 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/JDBCConnectorProvider.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/AbstractJDBCConnectorProvider.java
@@ -1,37 +1,53 @@
 package org.apache.hadoop.hive.metastore.dataconnector.jdbc;
 
-import java.net.ConnectException;
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.ArrayList;
-import java.util.Date;
-
-import org.apache.hadoop.hive.conf.Constants;
-import org.apache.hadoop.hive.metastore.ColumnType;
-import org.apache.hadoop.hive.metastore.TableType;
-import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.DataConnector;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.metastore.api.Order;
-import org.apache.hadoop.hive.metastore.api.SerDeInfo;
-import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
 import org.apache.hadoop.hive.metastore.dataconnector.AbstractDataConnectorProvider;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.HashMap;
+import java.io.IOException;
+import java.net.ConnectException;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
-public class JDBCConnectorProvider extends AbstractDataConnectorProvider {
-  private static Logger LOG = LoggerFactory.getLogger(JDBCConnectorProvider.class);
+public abstract class AbstractJDBCConnectorProvider extends AbstractDataConnectorProvider {
+  private static Logger LOG = LoggerFactory.getLogger(AbstractJDBCConnectorProvider.class);
+
+  // duplicate constants from Constants.java to avoid a dependency on hive-common
+  public static final String JDBC_HIVE_STORAGE_HANDLER_ID =
+      "org.apache.hive.storage.jdbc.JdbcStorageHandler";
+  public static final String JDBC_CONFIG_PREFIX = "hive.sql";
+  public static final String JDBC_CATALOG = JDBC_CONFIG_PREFIX + ".catalog";
+  public static final String JDBC_SCHEMA = JDBC_CONFIG_PREFIX + ".schema";
+  public static final String JDBC_TABLE = JDBC_CONFIG_PREFIX + ".table";
+  public static final String JDBC_DATABASE_TYPE = JDBC_CONFIG_PREFIX + ".database.type";
+  public static final String JDBC_URL = JDBC_CONFIG_PREFIX + ".jdbc.url";
+  public static final String JDBC_DRIVER = JDBC_CONFIG_PREFIX + ".jdbc.driver";
+  public static final String JDBC_USERNAME = JDBC_CONFIG_PREFIX + ".dbcp.username";
+  public static final String JDBC_PASSWORD = JDBC_CONFIG_PREFIX + ".dbcp.password";
+  public static final String JDBC_KEYSTORE = JDBC_CONFIG_PREFIX + ".dbcp.password.keystore";
+  public static final String JDBC_KEY = JDBC_CONFIG_PREFIX + ".dbcp.password.key";
+  public static final String JDBC_QUERY = JDBC_CONFIG_PREFIX + ".query";
+  public static final String JDBC_QUERY_FIELD_NAMES = JDBC_CONFIG_PREFIX + ".query.fieldNames";
+  public static final String JDBC_QUERY_FIELD_TYPES = JDBC_CONFIG_PREFIX + ".query.fieldTypes";
+  public static final String JDBC_SPLIT_QUERY = JDBC_CONFIG_PREFIX + ".query.split";
+  public static final String JDBC_PARTITION_COLUMN = JDBC_CONFIG_PREFIX + ".partitionColumn";
+  public static final String JDBC_NUM_PARTITIONS = JDBC_CONFIG_PREFIX + ".numPartitions";
+  public static final String JDBC_LOW_BOUND = JDBC_CONFIG_PREFIX + ".lowerBound";
+  public static final String JDBC_UPPER_BOUND = JDBC_CONFIG_PREFIX + ".upperBound";
+
+  private static final String JDBC_INPUTFORMAT_CLASS = "org.apache.hive.storage.jdbc.JdbcInputFormat".intern();
+  private static final String JDBC_OUTPUTFORMAT_CLASS = "org.apache.hive.storage.jdbc.JdbcOutputFormat".intern();
 
   String type = null; // MYSQL, POSTGRES, ORACLE, DERBY, MSSQL, DB2 etc.
   String driverClassName = null;
@@ -39,38 +55,40 @@ public class JDBCConnectorProvider extends AbstractDataConnectorProvider {
   String username = null;
   String password = null; // TODO convert to byte array
 
-  public JDBCConnectorProvider(String dbName, DataConnector dataConn) {
+  public AbstractJDBCConnectorProvider(String dbName, DataConnector dataConn) {
     super(dbName, dataConn);
     this.type = connector.getType().toUpperCase(); // TODO
     this.jdbcUrl = connector.getUrl();
-    this.username = connector.getParameters().get("dataconnector.username");
-    this.password = connector.getParameters().get("dataconnector.password");
+    this.username = connector.getParameters().get(JDBC_USERNAME);
+    this.password = connector.getParameters().get(JDBC_PASSWORD);
+    if (this.password == null) {
+      String keystore = connector.getParameters().get(JDBC_KEYSTORE);
+      String key = connector.getParameters().get(JDBC_KEY);
+      try {
+        char[] keyValue = MetastoreConf.getValueFromKeystore(keystore, key);
+        if (keyValue != null)
+          this.password = new String(keyValue);
+      } catch (IOException i) {
+        LOG.warn("Could not read key value from keystore");
+      }
+    }
   }
 
   @Override public void open() throws ConnectException {
-    switch(type.toLowerCase()) {
-    case MYSQL_TYPE:
-      driverClassName = "com.mysql.jdbc.Driver";
-      try {
-        Class.forName(driverClassName);
-        handle = DriverManager.getConnection(jdbcUrl, username, password);
-        isOpen = true;
-      } catch (ClassNotFoundException cnfe) {
-        LOG.warn("Driver class not found in classpath:" + driverClassName);
-        throw new RuntimeException("Driver class not found:" + driverClassName);
-      } catch (SQLException sqle) {
-        LOG.warn("Could not connect to remote data source at " + jdbcUrl);
-        throw new ConnectException("Could not connect to remote datasource at " + jdbcUrl + ",cause:" + sqle.getMessage());
-      }
-      break;
-    case POSTGRES_TYPE:
-      
-    default:
-      throw new RuntimeException("Unsupported JDBC type");
+    try {
+      Class.forName(driverClassName);
+      handle = DriverManager.getConnection(jdbcUrl, username, password);
+      isOpen = true;
+    } catch (ClassNotFoundException cnfe) {
+      LOG.warn("Driver class not found in classpath:" + driverClassName);
+      throw new RuntimeException("Driver class not found:" + driverClassName);
+    } catch (SQLException sqle) {
+      LOG.warn("Could not connect to remote data source at " + jdbcUrl);
+      throw new ConnectException("Could not connect to remote datasource at " + jdbcUrl + ",cause:" + sqle.getMessage());
     }
   }
 
-  private Connection getConnection() {
+  protected Connection getConnection() {
     try {
       if (!isOpen)
         open();
@@ -100,9 +118,7 @@ public class JDBCConnectorProvider extends AbstractDataConnectorProvider {
    * @throws MetaException To indicate any failures with executing this API
    * @param regex
    */
-  @Override public List<Table> getTables(String regex) throws MetaException {
-    return null;
-  }
+  @Override public abstract List<Table> getTables(String regex) throws MetaException;
 
   /**
    * Returns a list of all table names from the remote database.
@@ -133,6 +149,10 @@ public class JDBCConnectorProvider extends AbstractDataConnectorProvider {
     return null;
   }
 
+  protected abstract ResultSet fetchTableMetadata(String tableName) throws MetaException;
+
+  protected abstract ResultSet fetchTableNames() throws MetaException;
+
   /**
    * Fetch a single table with the given name, returns a Hive Table object from the remote database
    * @return Table A Table object for the matching table, null otherwise.
@@ -140,21 +160,30 @@ public class JDBCConnectorProvider extends AbstractDataConnectorProvider {
    * @param tableName
    */
   @Override public Table getTable(String tableName) throws MetaException {
+    ResultSet rs = null;
+    Table table = null;
     try {
+      // rs = fetchTableMetadata(tableName);
+      rs = fetchTableViaDBMetaData(tableName);
+      /*
       Statement stmt = getConnection().createStatement();
-      ResultSet rs = stmt.executeQuery(
+      rs = stmt.executeQuery(
           "SELECT table_name, column_name, is_nullable, data_type, character_maximum_length FROM INFORMATION_SCHEMA.Columns where table_schema='"
               + scoped_db + "' and table_name='" + tableName + "'");
+       */
       List<FieldSchema> cols = new ArrayList<>();
      // TODO throw exception if RS is empty
       while (rs.next()) {
         FieldSchema fs = new FieldSchema();
         fs.setName(rs.getString("COLUMN_NAME"));
-        fs.setType(getDataType(rs.getString("DATA_TYPE"), rs.getInt("CHARACTER_MAXIMUM_LENGTH")));
+        // fs.setType(getDataType(rs.getString("DATA_TYPE"), rs.getInt("CHARACTER_MAXIMUM_LENGTH")));
+        fs.setType(getDataType(rs.getString("TYPE_NAME"), rs.getInt("COLUMN_SIZE")));
         fs.setComment("inferred column type");
         cols.add(fs);
       }
 
+      table = buildTableFromColsList(tableName, cols);
+      /*
       //Setting the storage descriptor.
       StorageDescriptor sd = new StorageDescriptor();
       sd.setCols(cols);
@@ -176,41 +205,65 @@ public class JDBCConnectorProvider extends AbstractDataConnectorProvider {
       sd.setLocation("/tmp/some_dummy_path"); // TODO
       sd.setBucketCols(new ArrayList<String>());
       sd.setSortCols(new ArrayList<Order>());
+       */
 
       //Setting the table properties.
-      Map<String, String> tblProps = new HashMap<>();
-      tblProps.put(Constants.JDBC_DATABASE_TYPE, this.type);
-      tblProps.put(Constants.JDBC_DRIVER, this.driverClassName);
-      tblProps.put(Constants.JDBC_URL, this.jdbcUrl); // "jdbc://localhost:3306/hive"
-      tblProps.put(Constants.JDBC_USERNAME, this.username);
-      tblProps.put(Constants.JDBC_PASSWORD, this.password);
-      tblProps.put(Constants.JDBC_TABLE, tableName);
-      tblProps.put(hive_metastoreConstants.META_TABLE_STORAGE, Constants.JDBC_HIVE_STORAGE_HANDLER_ID);
-      tblProps.put("EXTERNAL", "TRUE");
-      // TODO: Need to include schema, catalog info in the paramters list.
+      table.getParameters().put(JDBC_DATABASE_TYPE, this.type);
+      table.getParameters().put(JDBC_DRIVER, this.driverClassName);
+      table.getParameters().put(JDBC_TABLE, tableName);
+      table.getParameters().put(JDBC_URL, this.jdbcUrl);
+      table.getParameters().put(hive_metastoreConstants.META_TABLE_STORAGE, JDBC_HIVE_STORAGE_HANDLER_ID);
+      table.getParameters().put("EXTERNAL", "TRUE");
+      Map<String, String> connectorParams = connector.getParameters();
+      for (String param: connectorParams.keySet()) {
+        if (param.startsWith(JDBC_CONFIG_PREFIX)) {
+          table.getParameters().put(param, connectorParams.get(param));
+        }
+      }
+      // TODO: Need to include schema, catalog info in the parameters list.
 
       //Setting the required table information
-      Table table = new Table();
-      table.setTableName(tableName);
-      table.setTableType(TableType.EXTERNAL_TABLE.toString());
-      table.setDbName(scoped_db);
-      table.setSd(sd);
-      table.setParameters(tblProps);
+      // Table table = new Table();
+      // table.setTableName(tableName);
+      // table.setTableType(TableType.EXTERNAL_TABLE.toString());
+      // table.setDbName(scoped_db);
+      // table.setSd(sd);
       // set partition keys to empty
-      table.setPartitionKeys(new ArrayList<FieldSchema>());
+      // table.setPartitionKeys(new
+          // ArrayList<FieldSchema>());
 
+      // table.setParameters(tblProps);
       return table;
     } catch (Exception e) {
       LOG.warn("Exception retrieving remote table " + scoped_db + "." + tableName + " via data connector "
           + connector.getName());
       throw new MetaException("Error retrieving remote table:" + e);
+    } finally {
+      try {
+        if (rs != null) {
+          rs.close();
+        }
+      } catch (Exception ex) { /* ignore */ }
     }
   }
 
+  private ResultSet fetchTableViaDBMetaData(String tableName) {
+    ResultSet rs = null;
+    try {
+      rs = getConnection().getMetaData().getColumns(scoped_db, null, tableName, null);
+    } catch (SQLException sqle) {
+      LOG.warn("Could not retrieve column names from JDBC table, cause:" + sqle.getMessage());
+    }
+    return rs;
+  }
+
   private String wrapSize(int size) {
     return "(" + size + ")";
   }
 
+  protected abstract String getDataType(String dbType, int size);
+
+  /*
   private String getDataType(String mySqlType, int size) {
    //TODO: Geometric, network, bit, array data types of postgresql need to be supported.
     switch(mySqlType)
@@ -273,4 +326,13 @@ public class JDBCConnectorProvider extends AbstractDataConnectorProvider {
       return ColumnType.VOID_TYPE_NAME;
     }
   }
+ */
+
+  @Override protected String getInputClass() {
+    return JDBC_INPUTFORMAT_CLASS;
+  }
+
+  @Override protected String getOutputClass() {
+    return JDBC_OUTPUTFORMAT_CLASS;
+  }
 }
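
Two behaviors of the base class above are worth calling out: the constructor
falls back to a Hadoop credential keystore when no plain-text password
parameter is set, and getTable() copies every "hive.sql."-prefixed connector
parameter onto the resulting table. A configuration sketch (the keystore path
and alias are placeholders):

    Map<String, String> params = new HashMap<>();
    params.put("hive.sql.dbcp.username", "hiveuser");                              // JDBC_USERNAME
    params.put("hive.sql.dbcp.password.keystore", "jceks://file/tmp/creds.jceks"); // JDBC_KEYSTORE
    params.put("hive.sql.dbcp.password.key", "mydb.password");                     // JDBC_KEY
    params.put("hive.sql.schema", "public");  // propagated to tables by the hive.sql.* copy loop
    connector.setParameters(params);
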
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/MySQLConfigGenerator.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/MySQLConfigGenerator.java
deleted file mode 100644
index 307f7ad..0000000
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/MySQLConfigGenerator.java
+++ /dev/null
@@ -1,4 +0,0 @@
-package org.apache.hadoop.hive.metastore.dataconnector.jdbc;
-
-public class MySQLConfigGenerator {
-}
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/JDBCConnectorProvider.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/MySQLConnectorProvider.java
similarity index 63%
rename from standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/JDBCConnectorProvider.java
rename to standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/MySQLConnectorProvider.java
index 7e9d408..cdeb6fa 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/JDBCConnectorProvider.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/MySQLConnectorProvider.java
@@ -1,135 +1,51 @@
 package org.apache.hadoop.hive.metastore.dataconnector.jdbc;
 
-import java.net.ConnectException;
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.ArrayList;
-import java.util.Date;
-
-import org.apache.hadoop.hive.conf.Constants;
 import org.apache.hadoop.hive.metastore.ColumnType;
-import org.apache.hadoop.hive.metastore.TableType;
-import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.DataConnector;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.metastore.api.Order;
-import org.apache.hadoop.hive.metastore.api.SerDeInfo;
-import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
-import org.apache.hadoop.hive.metastore.dataconnector.AbstractDataConnectorProvider;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.HashMap;
+import java.io.IOException;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
 import java.util.List;
-import java.util.Map;
 
-public class JDBCConnectorProvider extends AbstractDataConnectorProvider {
-  private static Logger LOG = LoggerFactory.getLogger(JDBCConnectorProvider.class);
+public class MySQLConnectorProvider extends AbstractJDBCConnectorProvider {
+  private static Logger LOG = LoggerFactory.getLogger(MySQLConnectorProvider.class);
 
-  String type = null; // MYSQL, POSTGRES, ORACLE, DERBY, MSSQL, DB2 etc.
-  String driverClassName = null;
-  String jdbcUrl = null;
-  String username = null;
-  String password = null; // TODO convert to byte array
+  private static final String DRIVER_CLASS = "com.mysql.jdbc.Driver".intern();
 
-  public JDBCConnectorProvider(String dbName, DataConnector dataConn) {
+  public MySQLConnectorProvider(String dbName, DataConnector dataConn) {
     super(dbName, dataConn);
-    this.type = connector.getType().toUpperCase(); // TODO
-    this.jdbcUrl = connector.getUrl();
-    this.username = connector.getParameters().get("dataconnector.username");
-    this.password = connector.getParameters().get("dataconnector.password");
-  }
-
-  @Override public void open() throws ConnectException {
-    switch(type.toLowerCase()) {
-    case MYSQL_TYPE:
-      driverClassName = "com.mysql.jdbc.Driver";
-      try {
-        Class.forName(driverClassName);
-        handle = DriverManager.getConnection(jdbcUrl, username, password);
-        isOpen = true;
-      } catch (ClassNotFoundException cnfe) {
-        LOG.warn("Driver class not found in classpath:" + driverClassName);
-        throw new RuntimeException("Driver class not found:" + driverClassName);
-      } catch (SQLException sqle) {
-        LOG.warn("Could not connect to remote data source at " + jdbcUrl);
-        throw new ConnectException("Could not connect to remote datasource at " + jdbcUrl + ",cause:" + sqle.getMessage());
-      }
-      break;
-    case POSTGRES_TYPE:
-      
-    default:
-      throw new RuntimeException("Unsupported JDBC type");
-    }
-  }
-
-  private Connection getConnection() {
-    try {
-      if (!isOpen)
-        open();
-    } catch (ConnectException ce) {
-      throw new RuntimeException(ce.getMessage());
-    }
-
-    if (handle instanceof Connection)
-      return (Connection)handle;
-
-    throw new RuntimeException("unexpected type for connection handle");
-  }
-
-  @Override public void close() {
-    if (isOpen) {
-      try {
-        ((Connection)handle).close();
-      } catch (SQLException sqle) {
-        LOG.warn("Could not close jdbc connection to " + jdbcUrl, sqle);
-      }
-    }
-  }
-
-  /**
-   * Returns Hive Table objects from the remote database for tables that match a name pattern.
-   * @return List A collection of objects that match the name pattern, null otherwise.
-   * @throws MetaException To indicate any failures with executing this API
-   * @param regex
-   */
-  @Override public List<Table> getTables(String regex) throws MetaException {
-    return null;
+    driverClassName = DRIVER_CLASS;
   }
 
   /**
-   * Returns a list of all table names from the remote database.
-   * @return List A collection of all the table names, null if there are no tables.
-   * @throws MetaException To indicate any failures with executing this API
+   * Returns a result set over the names of all tables in the remote database.
+   * @return ResultSet A result set of table names, one row per table.
+   * @throws MetaException To indicate any failures with executing this API
    */
-  @Override public List<String> getTableNames() throws MetaException {
+  @Override protected ResultSet fetchTableNames() throws MetaException {
     ResultSet rs = null;
     try {
       rs = getConnection().getMetaData().getTables(scoped_db, null, null, new String[] { "TABLE" });
-      if (rs != null) {
-        List<String> tables = new ArrayList<String>();
-        while(rs.next()) {
-          tables.add(rs.getString(3));
-        }
-        return tables;
-      }
     } catch (SQLException sqle) {
       LOG.warn("Could not retrieve table names from remote datasource, cause:" + sqle.getMessage());
-    } finally {
-      try {
-        if (rs != null) {
-          rs.close();
-          rs = null;
-        }
-      } catch(Exception e) { /* ignore */}
+      throw new MetaException("Could not retrieve table names from remote datasource, cause:" + sqle.getMessage());
     }
+    return rs;
+  }
+
+  /**
+   * Returns Hive Table objects from the remote database for tables that match a name pattern.
+   * @return List A collection of objects that match the name pattern, null otherwise.
+   * @throws MetaException To indicate any failures with executing this API
+   * @param regex
+   */
+  @Override public List<Table> getTables(String regex) throws MetaException {
     return null;
   }
 
@@ -139,12 +55,13 @@ public class JDBCConnectorProvider extends AbstractDataConnectorProvider {
    * @throws MetaException To indicate any failures with executing this API
    * @param tableName
    */
-  @Override public Table getTable(String tableName) throws MetaException {
+  @Override public ResultSet fetchTableMetadata(String tableName) throws MetaException {
     try {
       Statement stmt = getConnection().createStatement();
       ResultSet rs = stmt.executeQuery(
           "SELECT table_name, column_name, is_nullable, data_type, character_maximum_length FROM INFORMATION_SCHEMA.Columns where table_schema='"
               + scoped_db + "' and table_name='" + tableName + "'");
+      /*
       List<FieldSchema> cols = new ArrayList<>();
      // TODO throw exception if RS is empty
       while (rs.next()) {
@@ -200,6 +117,8 @@ public class JDBCConnectorProvider extends AbstractDataConnectorProvider {
       table.setPartitionKeys(new ArrayList<FieldSchema>());
 
       return table;
+       */
+      return rs;
     } catch (Exception e) {
       LOG.warn("Exception retrieving remote table " + scoped_db + "." + tableName + " via data connector "
           + connector.getName());
@@ -211,9 +130,9 @@ public class JDBCConnectorProvider extends AbstractDataConnectorProvider {
     return "(" + size + ")";
   }
 
-  private String getDataType(String mySqlType, int size) {
+  @Override protected String getDataType(String dbDataType, int size) {
    //TODO: Geometric, network, bit, array data types of postgresql need to be supported.
-    switch(mySqlType)
+    switch(dbDataType.toLowerCase())
     {
     case "char":
       return ColumnType.CHAR_TYPE_NAME + wrapSize(size);
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/PostgreSQLConnectorProvider.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/PostgreSQLConnectorProvider.java
new file mode 100644
index 0000000..41e08e9
--- /dev/null
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/PostgreSQLConnectorProvider.java
@@ -0,0 +1,116 @@
+package org.apache.hadoop.hive.metastore.dataconnector.jdbc;
+
+import org.apache.hadoop.hive.metastore.ColumnType;
+import org.apache.hadoop.hive.metastore.api.DataConnector;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.List;
+
+public class PostgreSQLConnectorProvider extends AbstractJDBCConnectorProvider {
+  private static Logger LOG = LoggerFactory.getLogger(PostgreSQLConnectorProvider.class);
+  private static final String DRIVER_CLASS = "org.postgresql.Driver".intern();
+
+  public PostgreSQLConnectorProvider(String dbName, DataConnector dataConn) {
+    super(dbName, dataConn);
+    driverClassName = DRIVER_CLASS;
+  }
+
+  /**
+   * Returns Hive Table objects from the remote database for tables that match a name pattern.
+   * @return List A collection of objects that match the name pattern, null otherwise.
+   * @throws MetaException To indicate any failures with executing this API
+   * @param regex
+   */
+  @Override public List<Table> getTables(String regex) throws MetaException {
+    return null;
+  }
+
+  @Override protected ResultSet fetchTableMetadata(String tableName) throws MetaException {
+    ResultSet rs = null;
+    try {
+      // column metadata for a single table, mirroring fetchTableViaDBMetaData in the base class
+      rs = getConnection().getMetaData().getColumns(scoped_db, null, tableName, null);
+    } catch (SQLException sqle) {
+      LOG.warn("Could not retrieve column metadata from remote datasource, cause:" + sqle.getMessage());
+      throw new MetaException("Could not retrieve column metadata from remote datasource, cause:" + sqle.getMessage());
+    }
+    return rs;
+  }
+
+  @Override protected ResultSet fetchTableNames() throws MetaException {
+    ResultSet rs = null;
+    try {
+      rs = getConnection().getMetaData().getTables(scoped_db, null, null, new String[] { "TABLE" });
+    } catch (SQLException sqle) {
+      LOG.warn("Could not retrieve table names from remote datasource, cause:" + sqle.getMessage());
+      throw new MetaException("Could not retrieve table names from remote datasource, cause:" + sqle.getMessage());
+    }
+    return rs;
+  }
+
+  private String wrapSize(int size) {
+    return "(" + size + ")";
+  }
+
+  @Override protected String getDataType(String dbDataType, int size) {
+    //TODO: Geometric, network, bit, array data types of postgresql need to be supported.
+    switch(dbDataType.toLowerCase())
+    {
+    case "char":
+    case "bpchar":
+      return ColumnType.CHAR_TYPE_NAME + wrapSize(size);
+    case "varchar":
+    case "tinytext":
+      return ColumnType.VARCHAR_TYPE_NAME + wrapSize(size);
+    case "text":
+    case "mediumtext":
+    case "enum":
+    case "set":
+    case "tsvector":
+    case "tsquery":
+    case "uuid":
+    case "json":
+      return ColumnType.STRING_TYPE_NAME;
+    case "blob":
+    case "mediumblob":
+    case "longblob":
+    case "bytea":
+      return ColumnType.BINARY_TYPE_NAME;
+    case "tinyint":
+      return ColumnType.TINYINT_TYPE_NAME;
+    case "smallint":
+    case "smallserial":
+      return ColumnType.SMALLINT_TYPE_NAME;
+    case "mediumint":
+    case "int":
+    case "serial":
+      return ColumnType.INT_TYPE_NAME;
+    case "bigint":
+    case "bigserial":
+    case "int8":
+    case "money":
+      return ColumnType.BIGINT_TYPE_NAME;
+    case "float":
+    case "real":
+      return ColumnType.FLOAT_TYPE_NAME;
+    case "double":
+    case "double precision":
+      return ColumnType.DOUBLE_TYPE_NAME;
+    case "decimal":
+    case "numeric":
+      return ColumnType.DECIMAL_TYPE_NAME;
+    case "date":
+      return ColumnType.DATE_TYPE_NAME;
+    case "datetime":
+      return ColumnType.DATETIME_TYPE_NAME;
+    case "timestamp":
+    case "time":
+    case "interval":
+      return ColumnType.TIMESTAMP_TYPE_NAME;
+    case "timestampz":
+    case "timez":
+      return ColumnType.TIMESTAMPTZ_TYPE_NAME;
+    case "boolean":
+      return ColumnType.BOOLEAN_TYPE_NAME;
+    default:
+      return ColumnType.VOID_TYPE_NAME;
+    }
+  }
+}
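
The mapping above can be spot-checked from a test in the same package
(getDataType is protected); a rough sketch, assuming the conventional
ColumnType constant values ("char", "string", "bigint", "void") and an
already-built connector:

    PostgreSQLConnectorProvider p = new PostgreSQLConnectorProvider("testdb", connector);
    org.junit.Assert.assertEquals("char(10)", p.getDataType("bpchar", 10)); // CHAR_TYPE_NAME + wrapSize
    org.junit.Assert.assertEquals("string", p.getDataType("uuid", 0));      // STRING_TYPE_NAME
    org.junit.Assert.assertEquals("bigint", p.getDataType("bigserial", 0)); // BIGINT_TYPE_NAME
    org.junit.Assert.assertEquals("void", p.getDataType("geometry", 0));    // unmapped -> VOID_TYPE_NAME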