Posted to commits@hive.apache.org by ng...@apache.org on 2021/04/06 15:40:19 UTC
[hive] 35/38: HIVE-24396: Changes from additional feedback from code review (Naveen Gangam)
This is an automated email from the ASF dual-hosted git repository.
ngangam pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git
commit 53edee9d42af1cc09660e0bd63c44ac1f2b6d7c7
Author: Naveen Gangam <ng...@cloudera.com>
AuthorDate: Fri Mar 26 10:00:07 2021 -0400
HIVE-24396: Changes from additional feedback from code review (Naveen Gangam)
---
.../ddl/database/create/CreateDatabaseOperation.java | 4 +++-
.../ql/ddl/database/desc/DescDatabaseOperation.java | 18 +++++++++++-------
.../alter/AbstractAlterDataConnectorOperation.java | 4 ++--
.../alter/url/AlterDataConnectorSetUrlOperation.java | 11 +++--------
.../show/ShowDataConnectorsOperation.java | 2 +-
.../java/org/apache/hadoop/hive/ql/metadata/Hive.java | 8 ++------
ql/src/test/queries/clientpositive/dataconnector.q | 6 +++---
.../hadoop/hive/metastore/HiveMetaStoreClient.java | 2 +-
.../apache/hadoop/hive/metastore/IMetaStoreClient.java | 2 +-
.../jdbc/AbstractJDBCConnectorProvider.java | 4 +++-
.../dataconnector/jdbc/DerbySQLConnectorProvider.java | 2 +-
.../dataconnector/jdbc/MySQLConnectorProvider.java | 2 +-
.../hive/metastore/HiveMetaStoreClientPreCatalog.java | 2 +-
.../hadoop/hive/metastore/TestHiveMetaStore.java | 10 +++-------
14 files changed, 36 insertions(+), 41 deletions(-)
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseOperation.java
index d02b039..c4961b2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseOperation.java
@@ -58,10 +58,12 @@ public class CreateDatabaseOperation extends DDLOperation<CreateDatabaseDesc> {
if (database.getLocationUri().equalsIgnoreCase(database.getManagedLocationUri())) {
throw new HiveException("Managed and external locations for database cannot be the same");
}
- } else {
+ } else if (desc.getDatabaseType() == DatabaseType.REMOTE) {
makeLocationQualified(database);
database.setConnector_name(desc.getConnectorName());
database.setRemote_dbname(desc.getRemoteDbName());
+ } else { // should never be here
+ throw new HiveException("Unsupported database type " + database.getType() + " for " + database.getName());
}
context.getDb().createDatabase(database, desc.getIfNotExists());
} catch (AlreadyExistsException ex) {
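For context, the create path now dispatches explicitly on the database type: NATIVE keeps the managed/external location check, REMOTE is wired to a connector plus a remote database name, and any other type fails fast instead of falling through. A minimal standalone sketch of that dispatch (simplified names; the committed code uses the metastore's Database and HiveException types):

    enum DatabaseType { NATIVE, REMOTE }

    final class CreateDatabaseSketch {
        static void validate(DatabaseType type, String locationUri, String managedLocationUri,
                             String connectorName, String remoteDbName) throws Exception {
            if (type == DatabaseType.NATIVE) {
                // Managed and external locations must differ for a native database.
                if (locationUri != null && locationUri.equalsIgnoreCase(managedLocationUri)) {
                    throw new Exception("Managed and external locations for database cannot be the same");
                }
            } else if (type == DatabaseType.REMOTE) {
                // A remote database is backed by a data connector and a remote db name.
                if (connectorName == null || remoteDbName == null) {
                    throw new Exception("REMOTE database requires a connector and a remote database name");
                }
            } else {
                // Fail fast rather than silently treating an unknown type as REMOTE.
                throw new Exception("Unsupported database type " + type);
            }
        }
    }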
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseOperation.java
index 0a19ccc..332e36e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseOperation.java
@@ -32,6 +32,8 @@ import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
import org.apache.hadoop.hive.ql.ddl.ShowUtils;
import org.apache.hadoop.hive.ql.metadata.HiveException;
+import static org.apache.hadoop.hive.metastore.api.DatabaseType.NATIVE;
+
/**
* Operation process of describing a database.
*/
@@ -49,25 +51,27 @@ public class DescDatabaseOperation extends DDLOperation<DescDatabaseDesc> {
}
SortedMap<String, String> params = null;
+ String location = "";
if (desc.isExtended()) {
params = new TreeMap<>(database.getParameters());
}
- String location = "";
DescDatabaseFormatter formatter = DescDatabaseFormatter.getFormatter(context.getConf());
- if (database.getType() == DatabaseType.NATIVE) {
+ switch(database.getType()) {
+ case NATIVE:
location = database.getLocationUri();
if (HiveConf.getBoolVar(context.getConf(), HiveConf.ConfVars.HIVE_IN_TEST)) {
location = "location/in/test";
}
- // database.setRemote_dbname("");
- // database.setConnector_name("");
-
formatter.showDatabaseDescription(outStream, database.getName(), database.getDescription(), location,
- database.getManagedLocationUri(), database.getOwnerName(), database.getOwnerType(), params, "", "");
- } else {
+ database.getManagedLocationUri(), database.getOwnerName(), database.getOwnerType(), params, "", "");
+ break;
+ case REMOTE:
formatter.showDatabaseDescription(outStream, database.getName(), database.getDescription(), "", "",
database.getOwnerName(), database.getOwnerType(), params, database.getConnector_name(), database.getRemote_dbname());
+ break;
+ default:
+ throw new HiveException("Unsupported database type " + database.getType() + " for " + database.getName());
}
} catch (Exception e) {
throw new HiveException(e, ErrorMsg.GENERIC_ERROR);
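DESC DATABASE gets the same explicit dispatch, now as a switch: NATIVE databases report their storage location (masked to "location/in/test" under HIVE_IN_TEST so golden q-file outputs stay stable), REMOTE databases report their connector and remote database name instead, and anything else throws. A rough standalone sketch, reduced to just the branching:

    enum DbType { NATIVE, REMOTE }

    final class DescDatabaseSketch {
        static String describe(DbType type, String location, String connector,
                               String remoteDb, boolean inTest) throws Exception {
            switch (type) {
            case NATIVE:
                // Mask the physical location in tests to keep output deterministic.
                return "location=" + (inTest ? "location/in/test" : location);
            case REMOTE:
                // Remote databases have no local location; show connector details.
                return "connector=" + connector + ", remoteDb=" + remoteDb;
            default:
                throw new Exception("Unsupported database type " + type);
            }
        }
    }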
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/AbstractAlterDataConnectorOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/AbstractAlterDataConnectorOperation.java
index a29dd4a..84093e7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/AbstractAlterDataConnectorOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/AbstractAlterDataConnectorOperation.java
@@ -39,11 +39,11 @@ public abstract class AbstractAlterDataConnectorOperation<T extends AbstractAlte
String dcName = desc.getConnectorName();
DataConnector connector = context.getDb().getDataConnector(dcName);
if (connector == null) {
- throw new HiveException(ErrorMsg.DATACONNECTOR_ALREADY_EXISTS, dcName);
+ throw new HiveException(ErrorMsg.DATACONNECTOR_NOT_EXISTS, dcName);
}
Map<String, String> params = connector.getParameters();
- if ((null != desc.getReplicationSpec()) &&
+ if ((desc.getReplicationSpec() != null) &&
!desc.getReplicationSpec().allowEventReplacementInto(params)) {
LOG.debug("DDLTask: Alter Connector {} is skipped as connector is newer than update", dcName);
return 0; // no replacement, the existing connector state is newer than our update.
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/url/AlterDataConnectorSetUrlOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/url/AlterDataConnectorSetUrlOperation.java
index d9caee0..190e833 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/url/AlterDataConnectorSetUrlOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/url/AlterDataConnectorSetUrlOperation.java
@@ -43,20 +43,15 @@ public class AlterDataConnectorSetUrlOperation extends
try {
String newUrl = desc.getURL();
- if (newUrl.equalsIgnoreCase(connector.getUrl())) {
- throw new HiveException("Old and New URL's for data connector cannot be the same");
+ if (StringUtils.isBlank(newUrl) || newUrl.equals(connector.getUrl())) {
+ throw new HiveException("New URL for data connector cannot be blank or the same as the current one");
}
URI newURI = new URI(newUrl);
if (!newURI.isAbsolute() || StringUtils.isBlank(newURI.getScheme())) {
throw new HiveException(ErrorMsg.INVALID_PATH, newUrl); // TODO make a new error message for URL
}
-
- if (newUrl.equals(connector.getUrl())) {
- LOG.info("Alter Connector skipped. No change in url.");
- } else {
- connector.setUrl(newUrl);
- }
+ connector.setUrl(newUrl);
return;
} catch (URISyntaxException e) {
throw new HiveException(e);
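The two overlapping checks on the new URL are collapsed into one up-front validation: the URL must be non-blank, different from the current one, absolute, and carry a scheme; only then is it applied. A standalone sketch of that validation, using plain JDK types:

    import java.net.URI;

    final class ConnectorUrlSketch {
        static void validateAndSet(String currentUrl, String newUrl) throws Exception {
            if (newUrl == null || newUrl.trim().isEmpty() || newUrl.equals(currentUrl)) {
                throw new Exception("New URL for data connector cannot be blank or the same as the current one");
            }
            URI uri = new URI(newUrl); // rejects syntactically malformed URLs
            if (!uri.isAbsolute() || uri.getScheme() == null || uri.getScheme().trim().isEmpty()) {
                throw new Exception("Invalid URL for data connector: " + newUrl);
            }
            // At this point the committed code calls connector.setUrl(newUrl).
        }
    }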
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/show/ShowDataConnectorsOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/show/ShowDataConnectorsOperation.java
index 22b10d8..b019036 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/show/ShowDataConnectorsOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/show/ShowDataConnectorsOperation.java
@@ -42,7 +42,7 @@ public class ShowDataConnectorsOperation extends DDLOperation<ShowDataConnectors
@Override
public int execute() throws HiveException {
- List<String> connectors = context.getDb().getAllDataConnectors();
+ List<String> connectors = context.getDb().getAllDataConnectorNames();
if (desc.getPattern() != null) {
LOG.debug("pattern: {}", desc.getPattern());
Pattern pattern = Pattern.compile(UDFLike.likePatternToRegExp(desc.getPattern()), Pattern.CASE_INSENSITIVE);
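Aside from the rename, the filtering itself is unchanged: SHOW CONNECTORS LIKE translates the SQL LIKE pattern to a regex (via UDFLike.likePatternToRegExp in the committed code) and matches names case-insensitively. A self-contained sketch with a deliberately naive translation ('%' -> '.*', '_' -> '.'):

    import java.util.List;
    import java.util.regex.Pattern;
    import java.util.stream.Collectors;

    final class LikeFilterSketch {
        // Naive SQL LIKE -> regex translation; the real helper handles edge
        // cases (e.g. regex metacharacters in the pattern) that this ignores.
        static String likeToRegex(String like) {
            return like.replace("%", ".*").replace("_", ".");
        }

        static List<String> filter(List<String> names, String likePattern) {
            Pattern p = Pattern.compile(likeToRegex(likePattern), Pattern.CASE_INSENSITIVE);
            return names.stream().filter(n -> p.matcher(n).matches()).collect(Collectors.toList());
        }
    }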
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index c2c6d7f..31170f6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -52,7 +52,6 @@ import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
-import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
@@ -74,7 +73,6 @@ import javax.jdo.JDODataStoreException;
import com.google.common.collect.ImmutableList;
-import org.apache.calcite.plan.RelOptMaterialization;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.lang3.ObjectUtils;
@@ -103,7 +101,6 @@ import org.apache.hadoop.hive.conf.Constants;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.metastore.api.GetPartitionsByNamesRequest;
-import org.apache.hadoop.hive.metastore.api.GetPartitionsByNamesResult;
import org.apache.hadoop.hive.ql.io.HdfsUtils;
import org.apache.hadoop.hive.metastore.HiveMetaException;
import org.apache.hadoop.hive.metastore.HiveMetaHook;
@@ -219,7 +216,6 @@ import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.StringUtils;
import org.apache.hive.common.util.HiveVersionInfo;
-import org.apache.hive.common.util.TxnIdUtils;
import org.apache.thrift.TException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -928,9 +924,9 @@ public class Hive {
* @return List of all dataconnector names.
* @throws HiveException
*/
- public List<String> getAllDataConnectors() throws HiveException {
+ public List<String> getAllDataConnectorNames() throws HiveException {
try {
- return getMSC().getAllDataConnectors();
+ return getMSC().getAllDataConnectorNames();
} catch (NoSuchObjectException e) {
return null;
} catch (Exception e) {
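One caveat survives the rename: the Hive facade maps NoSuchObjectException to a null return rather than an empty list, so callers need a null check. A hypothetical caller-side sketch:

    import java.util.Collections;
    import java.util.List;
    import org.apache.hadoop.hive.ql.metadata.Hive;
    import org.apache.hadoop.hive.ql.metadata.HiveException;

    final class ListConnectorsSketch {
        static void printConnectorNames() throws HiveException {
            List<String> names = Hive.get().getAllDataConnectorNames();
            // null means the metastore reported NoSuchObjectException.
            for (String name : names != null ? names : Collections.<String>emptyList()) {
                System.out.println(name);
            }
        }
    }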
diff --git a/ql/src/test/queries/clientpositive/dataconnector.q b/ql/src/test/queries/clientpositive/dataconnector.q
index c21524a..a89c7db 100644
--- a/ql/src/test/queries/clientpositive/dataconnector.q
+++ b/ql/src/test/queries/clientpositive/dataconnector.q
@@ -11,7 +11,7 @@ WITH DCPROPERTIES (
"hive.sql.dbcp.password"="hive1");
SHOW CONNECTORS;
--- CREATE INE already exists
+-- CREATE IF NOT EXISTS already
CREATE CONNECTOR IF NOT EXISTS mysql_test
TYPE 'mysql'
URL 'jdbc:mysql://nightly1.apache.org:3306/hive1'
@@ -21,7 +21,7 @@ WITH DCPROPERTIES (
"hive.sql.dbcp.password"="hive1");
SHOW CONNECTORS;
--- CREATE INE already exists
+-- CREATE IF NOT EXISTS already
CREATE CONNECTOR IF NOT EXISTS derby_test
TYPE 'derby'
URL 'jdbc:derby:./target/tmp/junit_metastore_db;create=true'
@@ -34,7 +34,7 @@ WITH DCPROPERTIES (
DROP CONNECTOR mysql_test;
SHOW CONNECTORS;
--- DROP IE exists
+-- DROP IF exists
DROP CONNECTOR IF EXISTS mysql_test;
SHOW CONNECTORS;
diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
index 9dbaacc..0455454 100644
--- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
+++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
@@ -1177,7 +1177,7 @@ public class HiveMetaStoreClient implements IMetaStoreClient, AutoCloseable {
* @throws TException thrift transport error
*/
@Override
- public List<String> getAllDataConnectors() throws MetaException, TException {
+ public List<String> getAllDataConnectorNames() throws MetaException, TException {
return client.get_dataconnectors();
}
diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
index 38a9888..3f9da12 100644
--- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
+++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
@@ -2038,7 +2038,7 @@ public interface IMetaStoreClient {
* @throws MetaException error accessing RDBMS.
* @throws TException thrift transport error
*/
- List<String> getAllDataConnectors() throws MetaException, TException;
+ List<String> getAllDataConnectorNames() throws MetaException, TException;
/**
* Drop a partition.
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/AbstractJDBCConnectorProvider.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/AbstractJDBCConnectorProvider.java
index 4027555..72a6efb 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/AbstractJDBCConnectorProvider.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/AbstractJDBCConnectorProvider.java
@@ -145,6 +145,7 @@ public abstract class AbstractJDBCConnectorProvider extends AbstractDataConnecto
}
} catch (SQLException sqle) {
LOG.warn("Could not retrieve table names from remote datasource, cause:" + sqle.getMessage());
+ throw new MetaException("Error retrieving remote table:" + sqle);
} finally {
try {
if (rs != null) {
@@ -214,12 +215,13 @@ public abstract class AbstractJDBCConnectorProvider extends AbstractDataConnecto
}
}
- private ResultSet fetchTableViaDBMetaData(String tableName) {
+ private ResultSet fetchTableViaDBMetaData(String tableName) throws SQLException {
ResultSet rs = null;
try {
rs = getConnection().getMetaData().getColumns(scoped_db, null, tableName, null);
} catch (SQLException sqle) {
LOG.warn("Could not retrieve column names from JDBC table, cause:" + sqle.getMessage());
+ throw sqle;
}
return rs;
}
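The behavioral change here: JDBC metadata failures are no longer swallowed after a warning (which previously surfaced later as a null ResultSet); they are rethrown so the caller can wrap them in MetaException. A simplified sketch of the pattern, using plain JDBC types:

    import java.sql.Connection;
    import java.sql.ResultSet;
    import java.sql.SQLException;

    final class MetadataFetchSketch {
        static ResultSet fetchColumns(Connection conn, String db, String table) throws SQLException {
            try {
                return conn.getMetaData().getColumns(db, null, table, null);
            } catch (SQLException sqle) {
                System.err.println("Could not retrieve column names from JDBC table, cause: "
                    + sqle.getMessage());
                throw sqle; // propagate so callers can wrap it in MetaException
            }
        }
    }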
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/DerbySQLConnectorProvider.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/DerbySQLConnectorProvider.java
index c212098..9830974 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/DerbySQLConnectorProvider.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/DerbySQLConnectorProvider.java
@@ -35,7 +35,7 @@ public class DerbySQLConnectorProvider extends AbstractJDBCConnectorProvider {
rs = getConnection().getMetaData().getTables(scoped_db, null, null, new String[] { "TABLE" });
} catch (SQLException sqle) {
LOG.warn("Could not retrieve table names from remote datasource, cause:" + sqle.getMessage());
- throw new MetaException("Could not retrieve table names from remote datasource, cause:" + sqle.getMessage());
+ throw new MetaException("Could not retrieve table names from remote datasource, cause:" + sqle);
}
return rs;
}
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/MySQLConnectorProvider.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/MySQLConnectorProvider.java
index 17d5a8b..9b8b434 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/MySQLConnectorProvider.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/MySQLConnectorProvider.java
@@ -33,7 +33,7 @@ public class MySQLConnectorProvider extends AbstractJDBCConnectorProvider {
rs = getConnection().getMetaData().getTables(scoped_db, null, null, new String[] { "TABLE" });
} catch (SQLException sqle) {
LOG.warn("Could not retrieve table names from remote datasource, cause:" + sqle.getMessage());
- throw new MetaException("Could not retrieve table names from remote datasource, cause:" + sqle.getMessage());
+ throw new MetaException("Could not retrieve table names from remote datasource, cause:" + sqle);
}
return rs;
}
diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClientPreCatalog.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClientPreCatalog.java
index 4d33ac4..6c44e0f 100644
--- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClientPreCatalog.java
+++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClientPreCatalog.java
@@ -1274,7 +1274,7 @@ public class HiveMetaStoreClientPreCatalog implements IMetaStoreClient, AutoClos
/** {@inheritDoc} */
@Override
- public List<String> getAllDataConnectors() throws MetaException {
+ public List<String> getAllDataConnectorNames() throws MetaException {
try {
client.get_dataconnectors(); // TODO run thru filterhook
} catch (Exception e) {
diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
index f11724d..c53f1bc 100644
--- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
+++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
@@ -19,7 +19,6 @@
package org.apache.hadoop.hive.metastore;
import java.io.IOException;
-import java.lang.reflect.InvocationTargetException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
@@ -29,11 +28,9 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
-import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
-import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
@@ -50,7 +47,6 @@ import org.apache.hadoop.hive.metastore.api.GetPartitionsFilterSpec;
import org.apache.hadoop.hive.metastore.api.GetProjectionsSpec;
import org.apache.hadoop.hive.metastore.api.GetPartitionsRequest;
import org.apache.hadoop.hive.metastore.api.GetPartitionsResponse;
-import org.apache.hadoop.hive.metastore.api.PartitionSpec;
import org.apache.hadoop.hive.metastore.api.PartitionSpecWithSharedSD;
import org.apache.hadoop.hive.metastore.api.PartitionWithoutSD;
import org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder;
@@ -3538,7 +3534,7 @@ public abstract class TestHiveMetaStore {
assertEquals("type of data connector returned is different from the type inserted", postgres_type, dConn.getType());
assertEquals("url of the data connector returned is different from the url inserted", postgres_url, dConn.getUrl());
- List<String> connectors = client.getAllDataConnectors();
+ List<String> connectors = client.getAllDataConnectorNames();
assertEquals("Number of dataconnectors returned is not as expected", 2, connectors.size());
DataConnector connector1 = new DataConnector(connector);
@@ -3566,11 +3562,11 @@ public abstract class TestHiveMetaStore {
assertEquals("Data connector owner type not as expected", PrincipalType.ROLE, dConn.getOwnerType());
client.dropDataConnector(connector_name1, false, false);
- connectors = client.getAllDataConnectors();
+ connectors = client.getAllDataConnectorNames();
assertEquals("Number of dataconnectors returned is not as expected", 1, connectors.size());
client.dropDataConnector(connector_name2, false, false);
- connectors = client.getAllDataConnectors();
+ connectors = client.getAllDataConnectorNames();
assertEquals("Number of dataconnectors returned is not as expected", 0, connectors.size());
} catch (Throwable e) {
System.err.println(StringUtils.stringifyException(e));