You are viewing a plain text version of this content. The canonical link for it is available in the original mailing-list archive entry.
Posted to commits@hive.apache.org by pv...@apache.org on 2018/04/27 11:37:27 UTC
hive git commit: HIVE-19285: Add logs to the subclasses of
MetaDataOperation (Marta Kuczora, via Peter Vary)
Repository: hive
Updated Branches:
refs/heads/master 230cc4bb2 -> 477649727
HIVE-19285: Add logs to the subclasses of MetaDataOperation (Marta Kuczora, via Peter Vary)
Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/47764972
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/47764972
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/47764972
Branch: refs/heads/master
Commit: 477649727dc9f232545ac4f2559fedda17665881
Parents: 230cc4b
Author: Marta Kuczora <ku...@cloudera.com>
Authored: Fri Apr 27 13:36:27 2018 +0200
Committer: Peter Vary <pv...@cloudera.com>
Committed: Fri Apr 27 13:36:27 2018 +0200
----------------------------------------------------------------------
.../cli/operation/GetCatalogsOperation.java | 8 +++-
.../cli/operation/GetColumnsOperation.java | 19 +++++++-
.../operation/GetCrossReferenceOperation.java | 27 +++++++++---
.../cli/operation/GetFunctionsOperation.java | 19 +++++++-
.../cli/operation/GetPrimaryKeysOperation.java | 46 ++++++++++++++------
.../cli/operation/GetSchemasOperation.java | 18 +++++++-
.../cli/operation/GetTableTypesOperation.java | 17 +++++++-
.../cli/operation/GetTablesOperation.java | 22 +++++++++-
.../cli/operation/GetTypeInfoOperation.java | 15 ++++++-
.../cli/operation/MetadataOperation.java | 17 ++++++++
10 files changed, 180 insertions(+), 28 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hive/blob/47764972/service/src/java/org/apache/hive/service/cli/operation/GetCatalogsOperation.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/operation/GetCatalogsOperation.java b/service/src/java/org/apache/hive/service/cli/operation/GetCatalogsOperation.java
index 7944467..d7fc1e8 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/GetCatalogsOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/GetCatalogsOperation.java
@@ -30,12 +30,17 @@ import org.apache.hive.service.cli.RowSet;
import org.apache.hive.service.cli.RowSetFactory;
import org.apache.hive.service.cli.TableSchema;
import org.apache.hive.service.cli.session.HiveSession;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* GetCatalogsOperation.
*
*/
public class GetCatalogsOperation extends MetadataOperation {
+
+ private static final Logger LOG = LoggerFactory.getLogger(GetCatalogsOperation.class.getName());
+
private static final TableSchema RESULT_SET_SCHEMA = new TableSchema()
.addStringColumn("TABLE_CAT", "Catalog name. NULL if not applicable.");
@@ -44,6 +49,7 @@ public class GetCatalogsOperation extends MetadataOperation {
protected GetCatalogsOperation(HiveSession parentSession) {
super(parentSession, OperationType.GET_CATALOGS);
rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion(), false);
+ LOG.info("Starting GetCatalogsOperation");
}
@Override
@@ -54,11 +60,11 @@ public class GetCatalogsOperation extends MetadataOperation {
authorizeMetaGets(HiveOperationType.GET_CATALOGS, null);
}
setState(OperationState.FINISHED);
+ LOG.info("Fetching catalog metadata has been successfully finished");
} catch (HiveSQLException e) {
setState(OperationState.ERROR);
throw e;
}
-
}
/* (non-Javadoc)
http://git-wip-us.apache.org/repos/asf/hive/blob/47764972/service/src/java/org/apache/hive/service/cli/operation/GetColumnsOperation.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/operation/GetColumnsOperation.java b/service/src/java/org/apache/hive/service/cli/operation/GetColumnsOperation.java
index d67ea90..838dd89 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/GetColumnsOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/GetColumnsOperation.java
@@ -50,6 +50,8 @@ import org.apache.hive.service.cli.RowSet;
import org.apache.hive.service.cli.RowSetFactory;
import org.apache.hive.service.cli.TableSchema;
import org.apache.hive.service.cli.session.HiveSession;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* GetColumnsOperation.
@@ -57,6 +59,8 @@ import org.apache.hive.service.cli.session.HiveSession;
*/
public class GetColumnsOperation extends MetadataOperation {
+ private static final Logger LOG = LoggerFactory.getLogger(GetColumnsOperation.class.getName());
+
private static final TableSchema RESULT_SET_SCHEMA = new TableSchema()
.addPrimitiveColumn("TABLE_CAT", Type.STRING_TYPE,
"Catalog name. NULL if not applicable")
@@ -127,11 +131,15 @@ public class GetColumnsOperation extends MetadataOperation {
this.tableName = tableName;
this.columnName = columnName;
this.rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion(), false);
+ LOG.info("Starting GetColumnsOperation with the following parameters: "
+ + "catalogName={}, schemaName={}, tableName={}, columnName={}",
+ catalogName, schemaName, tableName, columnName);
}
@Override
public void runInternal() throws HiveSQLException {
setState(OperationState.RUNNING);
+ LOG.info("Fetching column metadata");
try {
IMetaStoreClient metastoreClient = getParentSession().getMetaStoreClient();
String schemaPattern = convertSchemaPattern(schemaName);
@@ -204,18 +212,25 @@ public class GetColumnsOperation extends MetadataOperation {
"NO", // IS_AUTO_INCREMENT
};
rowSet.addRow(rowData);
+
+ if (LOG.isDebugEnabled()) {
+ String debugMessage = getDebugMessage("column", RESULT_SET_SCHEMA);
+ LOG.debug(debugMessage, rowData);
+ }
}
}
}
+ if (LOG.isDebugEnabled() && rowSet.numRows() == 0) {
+ LOG.debug("No column metadata has been returned.");
+ }
setState(OperationState.FINISHED);
+ LOG.info("Fetching column metadata has been successfully finished");
} catch (Exception e) {
setState(OperationState.ERROR);
throw new HiveSQLException(e);
}
-
}
-
private List<HivePrivilegeObject> getPrivObjs(Map<String, List<String>> db2Tabs) {
List<HivePrivilegeObject> privObjs = new ArrayList<>();
for (Entry<String, List<String>> dbTabs : db2Tabs.entrySet()) {
http://git-wip-us.apache.org/repos/asf/hive/blob/47764972/service/src/java/org/apache/hive/service/cli/operation/GetCrossReferenceOperation.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/operation/GetCrossReferenceOperation.java b/service/src/java/org/apache/hive/service/cli/operation/GetCrossReferenceOperation.java
index 99ccd4e..e39502f 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/GetCrossReferenceOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/GetCrossReferenceOperation.java
@@ -117,11 +117,16 @@ public class GetCrossReferenceOperation extends MetadataOperation {
this.foreignSchemaName = foreignSchema;
this.foreignTableName = foreignTable;
this.rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion(), false);
+ LOG.info("Starting GetCrossReferenceOperation with the following parameters:"
+ + " parentCatalogName={}, parentSchemaName={}, parentTableName={}, foreignCatalog={}, "
+ + "foreignSchema={}, foreignTable={}", parentCatalogName, parentSchemaName,
+ parentTableName, foreignCatalog, foreignSchema, foreignTable);
}
@Override
public void runInternal() throws HiveSQLException {
setState(OperationState.RUNNING);
+ LOG.info("Fetching cross reference metadata");
try {
IMetaStoreClient metastoreClient = getParentSession().getMetaStoreClient();
ForeignKeysRequest fkReq = new ForeignKeysRequest(parentSchemaName, parentTableName, foreignSchemaName, foreignTableName);
@@ -130,21 +135,29 @@ public class GetCrossReferenceOperation extends MetadataOperation {
return;
}
for (SQLForeignKey fk : fks) {
- rowSet.addRow(new Object[] {parentCatalogName,
- fk.getPktable_db(), fk.getPktable_name(), fk.getPkcolumn_name(),
- foreignCatalogName,
- fk.getFktable_db(), fk.getFktable_name(), fk.getFkcolumn_name(),
- fk.getKey_seq(), fk.getUpdate_rule(), fk.getDelete_rule(), fk.getFk_name(),
- fk.getPk_name(), 0});
+ Object[] rowData = new Object[] {parentCatalogName,
+ fk.getPktable_db(), fk.getPktable_name(), fk.getPkcolumn_name(),
+ foreignCatalogName,
+ fk.getFktable_db(), fk.getFktable_name(), fk.getFkcolumn_name(),
+ fk.getKey_seq(), fk.getUpdate_rule(), fk.getDelete_rule(), fk.getFk_name(),
+ fk.getPk_name(), 0};
+ rowSet.addRow(rowData);
+ if (LOG.isDebugEnabled()) {
+ String debugMessage = getDebugMessage("cross reference", RESULT_SET_SCHEMA);
+ LOG.debug(debugMessage, rowData);
+ }
+ }
+ if (LOG.isDebugEnabled() && rowSet.numRows() == 0) {
+ LOG.debug("No cross reference metadata has been returned.");
}
setState(OperationState.FINISHED);
+ LOG.info("Fetching cross reference metadata has been successfully finished");
} catch (Exception e) {
setState(OperationState.ERROR);
throw new HiveSQLException(e);
}
}
-
/* (non-Javadoc)
* @see org.apache.hive.service.cli.Operation#getResultSetSchema()
*/
http://git-wip-us.apache.org/repos/asf/hive/blob/47764972/service/src/java/org/apache/hive/service/cli/operation/GetFunctionsOperation.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/operation/GetFunctionsOperation.java b/service/src/java/org/apache/hive/service/cli/operation/GetFunctionsOperation.java
index 091bf50..5d5d099 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/GetFunctionsOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/GetFunctionsOperation.java
@@ -41,12 +41,17 @@ import org.apache.hive.service.cli.RowSetFactory;
import org.apache.hive.service.cli.TableSchema;
import org.apache.hive.service.cli.session.HiveSession;
import org.apache.thrift.TException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* GetFunctionsOperation.
*
*/
public class GetFunctionsOperation extends MetadataOperation {
+
+ private static final Logger LOG = LoggerFactory.getLogger(GetFunctionsOperation.class.getName());
+
private static final TableSchema RESULT_SET_SCHEMA = new TableSchema()
.addPrimitiveColumn("FUNCTION_CAT", Type.STRING_TYPE,
"Function catalog (may be null)")
@@ -74,11 +79,15 @@ public class GetFunctionsOperation extends MetadataOperation {
this.schemaName = schemaName;
this.functionName = functionName;
this.rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion(), false);
+ LOG.info(
+ "Starting GetFunctionsOperation with the following parameters: catalogName={}, schemaName={}, functionName={}",
+ catalogName, schemaName, functionName);
}
@Override
public void runInternal() throws HiveSQLException {
setState(OperationState.RUNNING);
+ LOG.info("Fetching function metadata");
if (isAuthV2Enabled()) {
// get databases for schema pattern
IMetaStoreClient metastoreClient = getParentSession().getMetaStoreClient();
@@ -115,16 +124,24 @@ public class GetFunctionsOperation extends MetadataOperation {
functionInfo.getClass().getCanonicalName()
};
rowSet.addRow(rowData);
+
+ if (LOG.isDebugEnabled()) {
+ String debugMessage = getDebugMessage("function", RESULT_SET_SCHEMA);
+ LOG.debug(debugMessage, rowData);
+ }
}
}
+ if (LOG.isDebugEnabled() && rowSet.numRows() == 0) {
+ LOG.debug("No function metadata has been returned");
+ }
setState(OperationState.FINISHED);
+ LOG.info("Fetching function metadata has been successfully finished");
} catch (Exception e) {
setState(OperationState.ERROR);
throw new HiveSQLException(e);
}
}
-
/* (non-Javadoc)
* @see org.apache.hive.service.cli.Operation#getResultSetSchema()
*/
http://git-wip-us.apache.org/repos/asf/hive/blob/47764972/service/src/java/org/apache/hive/service/cli/operation/GetPrimaryKeysOperation.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/operation/GetPrimaryKeysOperation.java b/service/src/java/org/apache/hive/service/cli/operation/GetPrimaryKeysOperation.java
index e603fdd..55f4ab6 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/GetPrimaryKeysOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/GetPrimaryKeysOperation.java
@@ -22,12 +22,10 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
-import org.apache.commons.lang.NumberUtils;
-import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.PrimaryKeysRequest;
import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
+import org.apache.hadoop.hive.serde2.thrift.Type;
import org.apache.hive.service.cli.FetchOrientation;
import org.apache.hive.service.cli.HiveSQLException;
import org.apache.hive.service.cli.OperationState;
@@ -35,14 +33,18 @@ import org.apache.hive.service.cli.OperationType;
import org.apache.hive.service.cli.RowSet;
import org.apache.hive.service.cli.RowSetFactory;
import org.apache.hive.service.cli.TableSchema;
-import org.apache.hadoop.hive.serde2.thrift.Type;
import org.apache.hive.service.cli.session.HiveSession;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* GetPrimaryKeysOperation.
*
*/
public class GetPrimaryKeysOperation extends MetadataOperation {
+
+ private static final Logger LOG = LoggerFactory.getLogger(GetPrimaryKeysOperation.class.getName());
+
/**
TABLE_CAT String => table catalog (may be null)
TABLE_SCHEM String => table schema (may be null)
@@ -78,11 +80,15 @@ PK_NAME String => primary key name (may be null)
this.schemaName = schemaName;
this.tableName = tableName;
this.rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion(), false);
+ LOG.info(
+ "Starting GetPrimaryKeysOperation with the following parameters: catalogName={}, schemaName={}, tableName={}",
+ catalogName, schemaName, tableName);
}
@Override
public void runInternal() throws HiveSQLException {
setState(OperationState.RUNNING);
+ LOG.info("Fetching primary key metadata");
try {
IMetaStoreClient metastoreClient = getParentSession().getMetaStoreClient();
PrimaryKeysRequest sqlReq = new PrimaryKeysRequest(schemaName, tableName);
@@ -91,17 +97,31 @@ PK_NAME String => primary key name (may be null)
return;
}
for (SQLPrimaryKey pk : pks) {
- rowSet.addRow(new Object[] {catalogName, pk.getTable_db(),
- pk.getTable_name(), pk.getColumn_name(), pk.getKey_seq(), pk.getPk_name()});
- }
- setState(OperationState.FINISHED);
- } catch (Exception e) {
- setState(OperationState.ERROR);
- throw new HiveSQLException(e);
- }
+ Object[] rowData = new Object[] {
+ catalogName,
+ pk.getTable_db(),
+ pk.getTable_name(),
+ pk.getColumn_name(),
+ pk.getKey_seq(),
+ pk.getPk_name()
+ };
+ rowSet.addRow(rowData);
+ if (LOG.isDebugEnabled()) {
+ String debugMessage = getDebugMessage("primary key", RESULT_SET_SCHEMA);
+ LOG.debug(debugMessage, rowData);
+ }
+ }
+ if (LOG.isDebugEnabled() && rowSet.numRows() == 0) {
+ LOG.debug("No primary key metadata has been returned.");
+ }
+ setState(OperationState.FINISHED);
+ LOG.info("Fetching primary key metadata has been successfully finished");
+ } catch (Exception e) {
+ setState(OperationState.ERROR);
+ throw new HiveSQLException(e);
+ }
}
-
/* (non-Javadoc)
* @see org.apache.hive.service.cli.Operation#getResultSetSchema()
*/
http://git-wip-us.apache.org/repos/asf/hive/blob/47764972/service/src/java/org/apache/hive/service/cli/operation/GetSchemasOperation.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/operation/GetSchemasOperation.java b/service/src/java/org/apache/hive/service/cli/operation/GetSchemasOperation.java
index de09ec9..b978787 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/GetSchemasOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/GetSchemasOperation.java
@@ -31,12 +31,17 @@ import org.apache.hive.service.cli.RowSet;
import org.apache.hive.service.cli.RowSetFactory;
import org.apache.hive.service.cli.TableSchema;
import org.apache.hive.service.cli.session.HiveSession;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* GetSchemasOperation.
*
*/
public class GetSchemasOperation extends MetadataOperation {
+
+ private static final Logger LOG = LoggerFactory.getLogger(GetSchemasOperation.class.getName());
+
private final String catalogName;
private final String schemaName;
@@ -51,11 +56,15 @@ public class GetSchemasOperation extends MetadataOperation {
this.catalogName = catalogName;
this.schemaName = schemaName;
this.rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion(), false);
+ LOG.info(
+ "Starting GetSchemasOperation with the following parameters: catalogName={}, schemaName={}",
+ catalogName, schemaName);
}
@Override
public void runInternal() throws HiveSQLException {
setState(OperationState.RUNNING);
+ LOG.info("Fetching schema metadata");
if (isAuthV2Enabled()) {
String cmdStr = "catalog : " + catalogName + ", schemaPattern : " + schemaName;
authorizeMetaGets(HiveOperationType.GET_SCHEMAS, null, cmdStr);
@@ -65,15 +74,22 @@ public class GetSchemasOperation extends MetadataOperation {
String schemaPattern = convertSchemaPattern(schemaName);
for (String dbName : metastoreClient.getDatabases(schemaPattern)) {
rowSet.addRow(new Object[] {dbName, DEFAULT_HIVE_CATALOG});
+ if (LOG.isDebugEnabled()) {
+ String debugMessage = getDebugMessage("schema", RESULT_SET_SCHEMA);
+ LOG.debug(debugMessage, dbName, DEFAULT_HIVE_CATALOG);
+ }
+ }
+ if (LOG.isDebugEnabled() && rowSet.numRows() == 0) {
+ LOG.debug("No schema metadata has been returned.");
}
setState(OperationState.FINISHED);
+ LOG.info("Fetching schema metadata has been successfully finished");
} catch (Exception e) {
setState(OperationState.ERROR);
throw new HiveSQLException(e);
}
}
-
/* (non-Javadoc)
* @see org.apache.hive.service.cli.Operation#getResultSetSchema()
*/
http://git-wip-us.apache.org/repos/asf/hive/blob/47764972/service/src/java/org/apache/hive/service/cli/operation/GetTableTypesOperation.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/operation/GetTableTypesOperation.java b/service/src/java/org/apache/hive/service/cli/operation/GetTableTypesOperation.java
index 59cfbb2..b5dac4b 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/GetTableTypesOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/GetTableTypesOperation.java
@@ -32,6 +32,8 @@ import org.apache.hive.service.cli.RowSet;
import org.apache.hive.service.cli.RowSetFactory;
import org.apache.hive.service.cli.TableSchema;
import org.apache.hive.service.cli.session.HiveSession;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* GetTableTypesOperation.
@@ -39,6 +41,8 @@ import org.apache.hive.service.cli.session.HiveSession;
*/
public class GetTableTypesOperation extends MetadataOperation {
+ private static final Logger LOG = LoggerFactory.getLogger(GetTableTypesOperation.class.getName());
+
protected static TableSchema RESULT_SET_SCHEMA = new TableSchema()
.addStringColumn("TABLE_TYPE", "Table type name.");
@@ -51,19 +55,30 @@ public class GetTableTypesOperation extends MetadataOperation {
getParentSession().getHiveConf().getVar(HiveConf.ConfVars.HIVE_SERVER2_TABLE_TYPE_MAPPING);
tableTypeMapping = TableTypeMappingFactory.getTableTypeMapping(tableMappingStr);
rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion(), false);
+ LOG.info("Starting GetTableTypesOperation");
}
@Override
public void runInternal() throws HiveSQLException {
setState(OperationState.RUNNING);
+ LOG.info("Fetching table type metadata");
if (isAuthV2Enabled()) {
authorizeMetaGets(HiveOperationType.GET_TABLETYPES, null);
}
try {
for (TableType type : TableType.values()) {
- rowSet.addRow(new String[] {tableTypeMapping.mapToClientType(type.toString())});
+ String tableType = tableTypeMapping.mapToClientType(type.toString());
+ rowSet.addRow(new String[] {tableType});
+ if (LOG.isDebugEnabled()) {
+ String debugMessage = getDebugMessage("table type", RESULT_SET_SCHEMA);
+ LOG.debug(debugMessage, tableType);
+ }
+ }
+ if (LOG.isDebugEnabled() && rowSet.numRows() == 0) {
+ LOG.debug("No table type metadata has been returned.");
}
setState(OperationState.FINISHED);
+ LOG.info("Fetching table type metadata has been successfully finished");
} catch (Exception e) {
setState(OperationState.ERROR);
throw new HiveSQLException(e);
http://git-wip-us.apache.org/repos/asf/hive/blob/47764972/service/src/java/org/apache/hive/service/cli/operation/GetTablesOperation.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/operation/GetTablesOperation.java b/service/src/java/org/apache/hive/service/cli/operation/GetTablesOperation.java
index c9233d0..1b5b09a 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/GetTablesOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/GetTablesOperation.java
@@ -36,6 +36,8 @@ import org.apache.hive.service.cli.RowSet;
import org.apache.hive.service.cli.RowSetFactory;
import org.apache.hive.service.cli.TableSchema;
import org.apache.hive.service.cli.session.HiveSession;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* GetTablesOperation.
@@ -43,6 +45,8 @@ import org.apache.hive.service.cli.session.HiveSession;
*/
public class GetTablesOperation extends MetadataOperation {
+ private static final Logger LOG = LoggerFactory.getLogger(GetTablesOperation.class.getName());
+
private final String catalogName;
private final String schemaName;
private final String tableName;
@@ -85,11 +89,16 @@ public class GetTablesOperation extends MetadataOperation {
tableTypeList = null;
}
this.rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion(), false);
+ LOG.info("Starting GetTablesOperation with the following parameters: "
+ + "catalogName={}, schemaName={}, tableName={}, tableTypes={}",
+ catalogName, schemaName, tableName,
+ tableTypeList != null ? tableTypeList.toString() : "null");
}
@Override
public void runInternal() throws HiveSQLException {
setState(OperationState.RUNNING);
+ LOG.info("Fetching table metadata");
try {
IMetaStoreClient metastoreClient = getParentSession().getMetaStoreClient();
String schemaPattern = convertSchemaPattern(schemaName);
@@ -104,16 +113,27 @@ public class GetTablesOperation extends MetadataOperation {
for (TableMeta tableMeta :
metastoreClient.getTableMeta(schemaPattern, tablePattern, tableTypeList)) {
+ String tableType = tableTypeMapping.mapToClientType(tableMeta.getTableType());
rowSet.addRow(new Object[] {
DEFAULT_HIVE_CATALOG,
tableMeta.getDbName(),
tableMeta.getTableName(),
- tableTypeMapping.mapToClientType(tableMeta.getTableType()),
+ tableType,
tableMeta.getComments(),
null, null, null, null, null
});
+
+ if (LOG.isDebugEnabled()) {
+ String debugMessage = getDebugMessage("table", RESULT_SET_SCHEMA);
+ LOG.debug(debugMessage, DEFAULT_HIVE_CATALOG, tableMeta.getDbName(),
+ tableMeta.getTableName(), tableType, tableMeta.getComments());
+ }
+ }
+ if (LOG.isDebugEnabled() && rowSet.numRows() == 0) {
+ LOG.debug("No table metadata has been returned.");
}
setState(OperationState.FINISHED);
+ LOG.info("Fetching table metadata has been successfully finished");
} catch (Exception e) {
setState(OperationState.ERROR);
throw new HiveSQLException(e);
http://git-wip-us.apache.org/repos/asf/hive/blob/47764972/service/src/java/org/apache/hive/service/cli/operation/GetTypeInfoOperation.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/operation/GetTypeInfoOperation.java b/service/src/java/org/apache/hive/service/cli/operation/GetTypeInfoOperation.java
index ac078b4..e3d26e4 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/GetTypeInfoOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/GetTypeInfoOperation.java
@@ -31,6 +31,8 @@ import org.apache.hive.service.cli.RowSet;
import org.apache.hive.service.cli.RowSetFactory;
import org.apache.hive.service.cli.TableSchema;
import org.apache.hive.service.cli.session.HiveSession;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* GetTypeInfoOperation.
@@ -38,6 +40,8 @@ import org.apache.hive.service.cli.session.HiveSession;
*/
public class GetTypeInfoOperation extends MetadataOperation {
+ private static final Logger LOG = LoggerFactory.getLogger(GetTypeInfoOperation.class.getName());
+
private final static TableSchema RESULT_SET_SCHEMA = new TableSchema()
.addPrimitiveColumn("TYPE_NAME", Type.STRING_TYPE,
"Type name")
@@ -81,11 +85,13 @@ public class GetTypeInfoOperation extends MetadataOperation {
protected GetTypeInfoOperation(HiveSession parentSession) {
super(parentSession, OperationType.GET_TYPE_INFO);
rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion(), false);
+ LOG.info("Starting GetTypeInfoOperation");
}
@Override
public void runInternal() throws HiveSQLException {
setState(OperationState.RUNNING);
+ LOG.info("Fetching type info metadata");
if (isAuthV2Enabled()) {
authorizeMetaGets(HiveOperationType.GET_TYPEINFO, null);
}
@@ -112,15 +118,22 @@ public class GetTypeInfoOperation extends MetadataOperation {
type.getNumPrecRadix() //NUM_PREC_RADIX
};
rowSet.addRow(rowData);
+ if (LOG.isDebugEnabled()) {
+ String debugMessage = getDebugMessage("type info", RESULT_SET_SCHEMA);
+ LOG.debug(debugMessage, rowData);
+ }
+ }
+ if (LOG.isDebugEnabled() && rowSet.numRows() == 0) {
+ LOG.debug("No type info metadata has been returned.");
}
setState(OperationState.FINISHED);
+ LOG.info("Fetching type info metadata has been successfully finished");
} catch (Exception e) {
setState(OperationState.ERROR);
throw new HiveSQLException(e);
}
}
-
/* (non-Javadoc)
* @see org.apache.hive.service.cli.Operation#getResultSetSchema()
*/
http://git-wip-us.apache.org/repos/asf/hive/blob/47764972/service/src/java/org/apache/hive/service/cli/operation/MetadataOperation.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/operation/MetadataOperation.java b/service/src/java/org/apache/hive/service/cli/operation/MetadataOperation.java
index bf7c021..3be21b5 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/MetadataOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/MetadataOperation.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginEx
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hive.service.cli.ColumnDescriptor;
import org.apache.hive.service.cli.HiveSQLException;
import org.apache.hive.service.cli.OperationState;
import org.apache.hive.service.cli.OperationType;
@@ -151,4 +152,20 @@ public abstract class MetadataOperation extends Operation {
throw new UnsupportedOperationException("MetadataOperation.cancel()");
}
+ protected String getDebugMessage(final String type, final TableSchema resultSetSchema) {
+ StringBuilder debugMessage = new StringBuilder();
+ debugMessage.append("Returning ");
+ debugMessage.append(type);
+ debugMessage.append(" metadata: ");
+ boolean firstColumn = true;
+ for (ColumnDescriptor column : resultSetSchema.getColumnDescriptors()) {
+ if (!firstColumn) {
+ debugMessage.append(", ");
+ }
+ debugMessage.append(column.getName());
+ debugMessage.append("={}");
+ firstColumn = false;
+ }
+ return debugMessage.toString();
+ }
}