Posted to commits@hive.apache.org by kg...@apache.org on 2019/01/23 09:50:08 UTC

[1/2] hive git commit: HIVE-16907: "INSERT INTO" overwrites old data when the destination table is encapsulated by backquotes (Zoltan Haindrich reviewed by Jesus Camacho Rodriguez)

Repository: hive
Updated Branches:
  refs/heads/master dfd63d979 -> eba9646b4


HIVE-16907: "INSERT INTO" overwrites old data when the destination table is encapsulated by backquotes (Zoltan Haindrich reviewed by Jesus Camacho Rodriguez)

Signed-off-by: Zoltan Haindrich <ki...@rxd.hu>


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/461d8a04
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/461d8a04
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/461d8a04

Branch: refs/heads/master
Commit: 461d8a04fa233b4351ab514d408eaa49f5167fff
Parents: dfd63d9
Author: Zoltan Haindrich <ki...@rxd.hu>
Authored: Wed Jan 23 10:40:13 2019 +0100
Committer: Zoltan Haindrich <ki...@rxd.hu>
Committed: Wed Jan 23 10:40:13 2019 +0100

----------------------------------------------------------------------
 .../org/apache/hadoop/hive/ql/ErrorMsg.java     |   1 +
 .../hive/ql/parse/BaseSemanticAnalyzer.java     |  32 ++++--
 .../hive/ql/parse/DDLSemanticAnalyzer.java      |   4 +-
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java  |   6 +-
 .../hadoop/hive/ql/parse/SubQueryUtils.java     | 106 +++++++++----------
 .../clientnegative/create_table_failure2.q      |   2 +-
 .../clientnegative/create_table_failure4.q      |   2 +-
 .../clientnegative/incorrectly_quoted_insert.q  |   5 +
 .../clientnegative/table_create_with_dot.q      |   2 +
 .../incorrectly_quoted_insert.q.out             |  29 +++++
 .../clientnegative/table_create_with_dot.q.out  |   7 ++
 11 files changed, 132 insertions(+), 64 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/461d8a04/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java b/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
index d58f626..83053d1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
@@ -470,6 +470,7 @@ public enum ErrorMsg {
   RESOURCE_PLAN_ALREADY_EXISTS(10417, "Resource plan {0} already exists", true),
   RESOURCE_PLAN_NOT_EXISTS(10418, "Resource plan {0} does not exist", true),
   INCOMPATIBLE_STRUCT(10419, "Incompatible structs.", true),
+  OBJECTNAME_CONTAINS_DOT(10420, "Table or database name may not contain dot(.) character", true),
 
   //========================== 20000 range starts here ========================//
 

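The new OBJECTNAME_CONTAINS_DOT entry is what produces the FAILED lines in the two new .q.out files at the end of this commit. A rough sketch of how an entry of this shape renders, assuming the trailing boolean marks the template as MessageFormat-parameterized (as the {0} placeholders in the neighboring entries suggest); the real ErrorMsg class additionally carries error codes and SQLState handling:

    import java.text.MessageFormat;

    class ErrorMsgSketch {
        // Parameterized entries such as "Resource plan {0} already exists"
        // bind their arguments via MessageFormat; a placeholder-free template
        // like the new dot message passes through unchanged.
        static String render(String template, Object... args) {
            return new MessageFormat(template).format(args);
        }
    }
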
http://git-wip-us.apache.org/repos/asf/hive/blob/461d8a04/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
index fb31254..e6779b2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
@@ -364,24 +364,36 @@ public abstract class BaseSemanticAnalyzer {
    * Get dequoted name from a table/column node.
    * @param tableOrColumnNode the table or column node
    * @return for table node, db.tab or tab. for column node column.
+   * @throws SemanticException
    */
-  public static String getUnescapedName(ASTNode tableOrColumnNode) {
+  public static String getUnescapedName(ASTNode tableOrColumnNode) throws SemanticException {
     return getUnescapedName(tableOrColumnNode, null);
   }
 
-  public static Map.Entry<String,String> getDbTableNamePair(ASTNode tableNameNode) {
-    assert(tableNameNode.getToken().getType() == HiveParser.TOK_TABNAME);
+  public static Map.Entry<String, String> getDbTableNamePair(ASTNode tableNameNode) throws SemanticException {
+
+    if (tableNameNode.getType() != HiveParser.TOK_TABNAME ||
+        (tableNameNode.getChildCount() != 1 && tableNameNode.getChildCount() != 2)) {
+      throw new SemanticException(ErrorMsg.INVALID_TABLE_NAME.getMsg(tableNameNode));
+    }
+
     if (tableNameNode.getChildCount() == 2) {
       String dbName = unescapeIdentifier(tableNameNode.getChild(0).getText());
       String tableName = unescapeIdentifier(tableNameNode.getChild(1).getText());
+      if (dbName.contains(".") || tableName.contains(".")) {
+        throw new SemanticException(ErrorMsg.OBJECTNAME_CONTAINS_DOT.getMsg(tableNameNode));
+      }
       return Pair.of(dbName, tableName);
     } else {
       String tableName = unescapeIdentifier(tableNameNode.getChild(0).getText());
+      if (tableName.contains(".")) {
+        throw new SemanticException(ErrorMsg.OBJECTNAME_CONTAINS_DOT.getMsg(tableNameNode));
+      }
       return Pair.of(null,tableName);
     }
   }
 
-  public static String getUnescapedName(ASTNode tableOrColumnNode, String currentDatabase) {
+  public static String getUnescapedName(ASTNode tableOrColumnNode, String currentDatabase) throws SemanticException {
     int tokenType = tableOrColumnNode.getToken().getType();
     if (tokenType == HiveParser.TOK_TABNAME) {
       // table node
@@ -410,9 +422,15 @@ public abstract class BaseSemanticAnalyzer {
     if (tabNameNode.getChildCount() == 2) {
       String dbName = unescapeIdentifier(tabNameNode.getChild(0).getText());
       String tableName = unescapeIdentifier(tabNameNode.getChild(1).getText());
+      if (dbName.contains(".") || tableName.contains(".")) {
+        throw new SemanticException(ErrorMsg.OBJECTNAME_CONTAINS_DOT.getMsg(tabNameNode));
+      }
       return new String[] {dbName, tableName};
     }
     String tableName = unescapeIdentifier(tabNameNode.getChild(0).getText());
+    if (tableName.contains(".")) {
+      throw new SemanticException(ErrorMsg.OBJECTNAME_CONTAINS_DOT.getMsg(tabNameNode));
+    }
     return Utilities.getDbTableName(tableName);
   }
 
@@ -434,8 +452,9 @@ public abstract class BaseSemanticAnalyzer {
    * @param node the table node
    * @return the table name without schema qualification
    *         (i.e., if name is "db.table" or "table", returns "table")
+   * @throws SemanticException
    */
-  public static String getUnescapedUnqualifiedTableName(ASTNode node) {
+  public static String getUnescapedUnqualifiedTableName(ASTNode node) throws SemanticException {
     assert node.getChildCount() <= 2;
 
     if (node.getChildCount() == 2) {
@@ -2288,12 +2307,13 @@ public abstract class BaseSemanticAnalyzer {
         Configuration conf = new Configuration();
         conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, keystore);
         boolean found = false;
-        for (CredentialProvider provider : CredentialProviderFactory.getProviders(conf))
+        for (CredentialProvider provider : CredentialProviderFactory.getProviders(conf)) {
           if (provider instanceof AbstractJavaKeyStoreProvider) {
             Path path = ((AbstractJavaKeyStoreProvider) provider).getPath();
             inputs.add(toReadEntity(path));
             found = true;
           }
+        }
         if (!found) {
           throw new SemanticException("Cannot recognize keystore " + keystore + ", only JavaKeyStoreProvider is " +
                   "supported");

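Each check added above follows one pattern: unescape the identifier (strip the backquotes), then reject any dot that survives, since a dot inside a single quoted identifier would otherwise be reinterpreted as a db/table separator. A self-contained sketch of the rule and of the spellings it distinguishes (the helper name and exception type are illustrative, not the patch's):

    class DotCheckSketch {
        // Accepted:  insert into tdb.t1 ...      (db "tdb", table "t1")
        // Accepted:  insert into `tdb`.`t1` ...  (each part quoted separately)
        // Rejected:  insert into `tdb.t1` ...    (one identifier containing '.')
        static String requireDotFree(String unescapedIdentifier) {
            if (unescapedIdentifier.contains(".")) {
                throw new IllegalArgumentException(
                    "Table or database name may not contain dot(.) character: "
                        + unescapedIdentifier);
            }
            return unescapedIdentifier;
        }
    }
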
http://git-wip-us.apache.org/repos/asf/hive/blob/461d8a04/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index 0e5b3e5..db3b427 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -3593,7 +3593,9 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
       // Compile internal query to capture underlying table partition dependencies
       StringBuilder cmd = new StringBuilder();
       cmd.append("SELECT * FROM ");
-      cmd.append(HiveUtils.unparseIdentifier(getDotName(qualified)));
+      cmd.append(HiveUtils.unparseIdentifier(qualified[0]));
+      cmd.append(".");
+      cmd.append(HiveUtils.unparseIdentifier(qualified[1]));
       cmd.append(" WHERE ");
       boolean firstOr = true;
       for (int i = 0; i < addPartitionDesc.getPartitionCount(); ++i) {

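This hunk is a consequence of the stricter rule above: unparsing the joined "db.tbl" string as one identifier would emit a single backquoted name containing a dot, which the analyzer now rejects, so the internal query quotes each part separately. A sketch of the resulting idiom (identifier values hypothetical):

    import org.apache.hadoop.hive.ql.metadata.HiveUtils;

    class QualifySketch {
        // Quoting db and table separately yields `db`.`tbl`; quoting the
        // joined name would yield `db.tbl`, a single identifier that the
        // new dot check rejects.
        static String qualify(String db, String tbl) {
            return HiveUtils.unparseIdentifier(db) + "." + HiveUtils.unparseIdentifier(tbl);
        }
    }
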
http://git-wip-us.apache.org/repos/asf/hive/blob/461d8a04/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 54f34f6..adce54c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -995,7 +995,8 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
     }
     return new int[] {aliasIndex, propsIndex, tsampleIndex, ssampleIndex};
   }
-  String findSimpleTableName(ASTNode tabref, int aliasIndex) {
+
+  String findSimpleTableName(ASTNode tabref, int aliasIndex) throws SemanticException {
     assert tabref.getType() == HiveParser.TOK_TABREF;
     ASTNode tableTree = (ASTNode) (tabref.getChild(0));
 
@@ -11840,7 +11841,8 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
   }
 
   private static void walkASTAndQualifyNames(ASTNode ast,
-      Set<String> cteAlias, Context ctx, Hive db, Set<Integer> ignoredTokens, UnparseTranslator unparseTranslator) {
+      Set<String> cteAlias, Context ctx, Hive db, Set<Integer> ignoredTokens, UnparseTranslator unparseTranslator)
+      throws SemanticException {
     Queue<Node> queue = new LinkedList<>();
     queue.add(ast);
     while (!queue.isEmpty()) {

http://git-wip-us.apache.org/repos/asf/hive/blob/461d8a04/ql/src/java/org/apache/hadoop/hive/ql/parse/SubQueryUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SubQueryUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SubQueryUtils.java
index 3c4e3d5..099157f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SubQueryUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SubQueryUtils.java
@@ -297,13 +297,13 @@ public class SubQueryUtils {
     return r;
   }
 
-  static List<String> getTableAliasesInSubQuery(ASTNode fromClause) {
+  static List<String> getTableAliasesInSubQuery(ASTNode fromClause) throws SemanticException {
     List<String> aliases = new ArrayList<String>();
     getTableAliasesInSubQuery((ASTNode) fromClause.getChild(0), aliases);
     return aliases;
   }
 
-  private static void getTableAliasesInSubQuery(ASTNode joinNode, List<String> aliases) {
+  private static void getTableAliasesInSubQuery(ASTNode joinNode, List<String> aliases) throws SemanticException {
 
     if ((joinNode.getToken().getType() == HiveParser.TOK_TABREF)
         || (joinNode.getToken().getType() == HiveParser.TOK_SUBQUERY)
@@ -324,7 +324,7 @@ public class SubQueryUtils {
       getTableAliasesInSubQuery(right, aliases);
     }
   }
-  
+
   static ASTNode hasUnQualifiedColumnReferences(ASTNode ast) {
     int type = ast.getType();
     if ( type == HiveParser.DOT ) {
@@ -333,7 +333,7 @@ public class SubQueryUtils {
     else if ( type == HiveParser.TOK_TABLE_OR_COL ) {
       return ast;
     }
-    
+
     for(int i=0; i < ast.getChildCount(); i++ ) {
       ASTNode c = hasUnQualifiedColumnReferences((ASTNode) ast.getChild(i));
       if ( c != null ) {
@@ -368,7 +368,7 @@ public class SubQueryUtils {
     }
     return ast;
   }
-  
+
   static ASTNode subQueryWhere(ASTNode insertClause) {
     if (insertClause.getChildCount() > 2 &&
         insertClause.getChild(2).getType() == HiveParser.TOK_WHERE ) {
@@ -502,15 +502,15 @@ public class SubQueryUtils {
    * This Subquery is joined with the Outer Query plan on the join condition 'c = 0'.
    * The join condition ensures that in case there are null values in the joining column
    * the Query returns no rows.
-   * 
+   *
    * The AST tree for this is:
-   * 
+   *
    * ^(TOK_QUERY
    *    ^(TOK FROM
    *        ^(TOK_SUBQUERY
    *            {the input SubQuery, with correlation removed}
-   *            subQueryAlias 
-   *          ) 
+   *            subQueryAlias
+   *          )
    *     )
    *     ^(TOK_INSERT
    *         ^(TOK_DESTINATION...)
@@ -518,51 +518,51 @@ public class SubQueryUtils {
    *             ^(TOK_SELECTEXPR {ast tree for count *}
    *          )
    *          ^(TOK_WHERE
-   *             {is null check for joining column} 
+   *             {is null check for joining column}
    *           )
    *      )
    * )
-   */  
-  static ASTNode buildNotInNullCheckQuery(ASTNode subQueryAST, 
-      String subQueryAlias, 
+   */
+  static ASTNode buildNotInNullCheckQuery(ASTNode subQueryAST,
+      String subQueryAlias,
       String cntAlias,
       List<ASTNode> corrExprs,
       RowResolver sqRR) {
-    
+
     subQueryAST = (ASTNode) ParseDriver.adaptor.dupTree(subQueryAST);
-    ASTNode qry = (ASTNode) 
+    ASTNode qry = (ASTNode)
         ParseDriver.adaptor.create(HiveParser.TOK_QUERY, "TOK_QUERY");
-    
+
     qry.addChild(buildNotInNullCheckFrom(subQueryAST, subQueryAlias));
     ASTNode insertAST = buildNotInNullCheckInsert();
     qry.addChild(insertAST);
     insertAST.addChild(buildNotInNullCheckSelect(cntAlias));
-    insertAST.addChild(buildNotInNullCheckWhere(subQueryAST, 
+    insertAST.addChild(buildNotInNullCheckWhere(subQueryAST,
         subQueryAlias, corrExprs, sqRR));
-    
+
     return qry;
   }
-  
+
   /*
    * build:
    *    ^(TOK FROM
    *        ^(TOK_SUBQUERY
    *            {the input SubQuery, with correlation removed}
-   *            subQueryAlias 
-   *          ) 
+   *            subQueryAlias
+   *          )
    *     )
 
    */
   static ASTNode buildNotInNullCheckFrom(ASTNode subQueryAST, String subQueryAlias) {
     ASTNode from = (ASTNode) ParseDriver.adaptor.create(HiveParser.TOK_FROM, "TOK_FROM");
-    ASTNode sqExpr = (ASTNode) 
+    ASTNode sqExpr = (ASTNode)
         ParseDriver.adaptor.create(HiveParser.TOK_SUBQUERY, "TOK_SUBQUERY");
     sqExpr.addChild(subQueryAST);
     sqExpr.addChild(createAliasAST(subQueryAlias));
     from.addChild(sqExpr);
     return from;
   }
-  
+
   /*
    * build
    *     ^(TOK_INSERT
@@ -570,21 +570,21 @@ public class SubQueryUtils {
    *      )
    */
   static ASTNode buildNotInNullCheckInsert() {
-    ASTNode insert = (ASTNode) 
+    ASTNode insert = (ASTNode)
         ParseDriver.adaptor.create(HiveParser.TOK_INSERT, "TOK_INSERT");
-    ASTNode dest = (ASTNode) 
+    ASTNode dest = (ASTNode)
         ParseDriver.adaptor.create(HiveParser.TOK_DESTINATION, "TOK_DESTINATION");
-    ASTNode dir = (ASTNode) 
+    ASTNode dir = (ASTNode)
         ParseDriver.adaptor.create(HiveParser.TOK_DIR, "TOK_DIR");
-    ASTNode tfile = (ASTNode) 
+    ASTNode tfile = (ASTNode)
         ParseDriver.adaptor.create(HiveParser.TOK_TMP_FILE, "TOK_TMP_FILE");
     insert.addChild(dest);
     dest.addChild(dir);
     dir.addChild(tfile);
-    
+
     return insert;
   }
-  
+
   /*
    * build:
    *         ^(TOK_SELECT
@@ -592,37 +592,37 @@ public class SubQueryUtils {
    *          )
    */
   static ASTNode buildNotInNullCheckSelect(String cntAlias) {
-    ASTNode select = (ASTNode) 
+    ASTNode select = (ASTNode)
         ParseDriver.adaptor.create(HiveParser.TOK_SELECT, "TOK_SELECT");
-    ASTNode selectExpr = (ASTNode) 
+    ASTNode selectExpr = (ASTNode)
         ParseDriver.adaptor.create(HiveParser.TOK_SELEXPR, "TOK_SELEXPR");
-    ASTNode countStar = (ASTNode) 
+    ASTNode countStar = (ASTNode)
         ParseDriver.adaptor.create(HiveParser.TOK_FUNCTIONSTAR, "TOK_FUNCTIONSTAR");
     ASTNode alias = (createAliasAST(cntAlias));
-    
+
     countStar.addChild((ASTNode) ParseDriver.adaptor.create(HiveParser.Identifier, "count"));
     select.addChild(selectExpr);
     selectExpr.addChild(countStar);
     selectExpr.addChild(alias);
-    
+
     return select;
   }
-  
+
   /*
    * build:
    *          ^(TOK_WHERE
-   *             {is null check for joining column} 
+   *             {is null check for joining column}
    *           )
    */
-  static ASTNode buildNotInNullCheckWhere(ASTNode subQueryAST, 
-      String sqAlias, 
+  static ASTNode buildNotInNullCheckWhere(ASTNode subQueryAST,
+      String sqAlias,
       List<ASTNode> corrExprs,
       RowResolver sqRR) {
-    
+
     ASTNode sqSelect = (ASTNode) subQueryAST.getChild(1).getChild(1);
     ASTNode selExpr = (ASTNode) sqSelect.getChild(0);
     String colAlias = null;
-    
+
     if ( selExpr.getChildCount() == 2 ) {
       colAlias = selExpr.getChild(1).getText();
     } else if (selExpr.getChild(0).getType() != HiveParser.TOK_ALLCOLREF) {
@@ -634,29 +634,29 @@ public class SubQueryUtils {
       String[] joinColName = sqRR.reverseLookup(joinColumn.getInternalName());
       colAlias = joinColName[1];
     }
-    
+
     ASTNode searchCond = isNull(createColRefAST(sqAlias, colAlias));
-    
+
     for(ASTNode e : corrExprs ) {
       ASTNode p = (ASTNode) ParseDriver.adaptor.dupTree(e);
-      p = isNull(p);      
-      searchCond = orAST(searchCond, p);      
+      p = isNull(p);
+      searchCond = orAST(searchCond, p);
     }
-    
+
     ASTNode where = (ASTNode) ParseDriver.adaptor.create(HiveParser.TOK_WHERE, "TOK_WHERE");
     where.addChild(searchCond);
     return where;
   }
-  
+
   static ASTNode buildNotInNullJoinCond(String subqueryAlias, String cntAlias) {
-    
-    ASTNode eq = (ASTNode) 
+
+    ASTNode eq = (ASTNode)
         ParseDriver.adaptor.create(HiveParser.EQUAL, "=");
-    
+
     eq.addChild(createColRefAST(subqueryAlias, cntAlias));
-    eq.addChild((ASTNode) 
+    eq.addChild((ASTNode)
         ParseDriver.adaptor.create(HiveParser.Number, "0"));
-    
+
     return eq;
   }
 
@@ -716,7 +716,7 @@ public class SubQueryUtils {
       }
     }
   }
-  
+
   public static interface ISubQueryJoinInfo {
     public String getAlias();
     public JoinType getJoinType();
@@ -726,7 +726,7 @@ public class SubQueryUtils {
     public String getOuterQueryId();
   };
 
-    
+
   /*
    * Using CommonTreeAdaptor because the Adaptor in ParseDriver doesn't carry
    * the token indexes when duplicating a Tree.

http://git-wip-us.apache.org/repos/asf/hive/blob/461d8a04/ql/src/test/queries/clientnegative/create_table_failure2.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/create_table_failure2.q b/ql/src/test/queries/clientnegative/create_table_failure2.q
index e873f34..48f834b 100644
--- a/ql/src/test/queries/clientnegative/create_table_failure2.q
+++ b/ql/src/test/queries/clientnegative/create_table_failure2.q
@@ -1,2 +1,2 @@
 --! qt:dataset:src
-create table `table_in_database_creation_not_exist.test` as select * from src limit 1;
\ No newline at end of file
+create table table_in_database_creation_not_exist.test as select * from src limit 1;

http://git-wip-us.apache.org/repos/asf/hive/blob/461d8a04/ql/src/test/queries/clientnegative/create_table_failure4.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/create_table_failure4.q b/ql/src/test/queries/clientnegative/create_table_failure4.q
index 67745e0..6a54873 100644
--- a/ql/src/test/queries/clientnegative/create_table_failure4.q
+++ b/ql/src/test/queries/clientnegative/create_table_failure4.q
@@ -1 +1 @@
-create table `table_in_database_creation_not_exist.test` (a string);
\ No newline at end of file
+create table table_in_database_creation_not_exist.test (a string);

http://git-wip-us.apache.org/repos/asf/hive/blob/461d8a04/ql/src/test/queries/clientnegative/incorrectly_quoted_insert.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/incorrectly_quoted_insert.q b/ql/src/test/queries/clientnegative/incorrectly_quoted_insert.q
new file mode 100644
index 0000000..bfdf1b6
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/incorrectly_quoted_insert.q
@@ -0,0 +1,5 @@
+create database tdb;
+use tdb;
+create table t1(id int);
+create table t2(id int);
+explain insert into `tdb.t1` select * from t2;

http://git-wip-us.apache.org/repos/asf/hive/blob/461d8a04/ql/src/test/queries/clientnegative/table_create_with_dot.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/table_create_with_dot.q b/ql/src/test/queries/clientnegative/table_create_with_dot.q
new file mode 100644
index 0000000..5b3a253
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/table_create_with_dot.q
@@ -0,0 +1,2 @@
+create database asd;
+create table `asd.tbl` (a integer);

http://git-wip-us.apache.org/repos/asf/hive/blob/461d8a04/ql/src/test/results/clientnegative/incorrectly_quoted_insert.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/incorrectly_quoted_insert.q.out b/ql/src/test/results/clientnegative/incorrectly_quoted_insert.q.out
new file mode 100644
index 0000000..7b476d5
--- /dev/null
+++ b/ql/src/test/results/clientnegative/incorrectly_quoted_insert.q.out
@@ -0,0 +1,29 @@
+PREHOOK: query: create database tdb
+PREHOOK: type: CREATEDATABASE
+PREHOOK: Output: database:tdb
+POSTHOOK: query: create database tdb
+POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Output: database:tdb
+PREHOOK: query: use tdb
+PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:tdb
+POSTHOOK: query: use tdb
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:tdb
+PREHOOK: query: create table t1(id int)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:tdb
+PREHOOK: Output: tdb@t1
+POSTHOOK: query: create table t1(id int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:tdb
+POSTHOOK: Output: tdb@t1
+PREHOOK: query: create table t2(id int)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:tdb
+PREHOOK: Output: tdb@t2
+POSTHOOK: query: create table t2(id int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:tdb
+POSTHOOK: Output: tdb@t2
+FAILED: SemanticException Line 2:20 Table or database name may not contain dot(.) character 'tdb.t1'

http://git-wip-us.apache.org/repos/asf/hive/blob/461d8a04/ql/src/test/results/clientnegative/table_create_with_dot.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/table_create_with_dot.q.out b/ql/src/test/results/clientnegative/table_create_with_dot.q.out
new file mode 100644
index 0000000..99cdf0c
--- /dev/null
+++ b/ql/src/test/results/clientnegative/table_create_with_dot.q.out
@@ -0,0 +1,7 @@
+PREHOOK: query: create database asd
+PREHOOK: type: CREATEDATABASE
+PREHOOK: Output: database:asd
+POSTHOOK: query: create database asd
+POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Output: database:asd
+FAILED: SemanticException Line 2:13 Table or database name may not contain dot(.) character 'asd.tbl'


[2/2] hive git commit: HIVE-21138: Fix some of the alerts raised by lgtm.com (Malcolm Taylor via Zoltan Haindrich)

Posted by kg...@apache.org.
HIVE-21138: Fix some of the alerts raised by lgtm.com (Malcolm Taylor via Zoltan Haindrich)

Signed-off-by: Zoltan Haindrich <ki...@rxd.hu>


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/eba9646b
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/eba9646b
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/eba9646b

Branch: refs/heads/master
Commit: eba9646b41cc1306b7c5b0f67dcf2853e0b8171d
Parents: 461d8a0
Author: Malcolm Taylor <ma...@semmle.com>
Authored: Wed Jan 23 10:41:48 2019 +0100
Committer: Zoltan Haindrich <ki...@rxd.hu>
Committed: Wed Jan 23 10:41:48 2019 +0100

----------------------------------------------------------------------
 .../hadoop/hive/llap/cli/LlapServiceDriver.java |  2 +-
 .../hadoop/hive/ql/exec/repl/ReplDumpTask.java  |  2 +-
 .../ql/exec/tez/KillTriggerActionHandler.java   | 34 ++++++++++----------
 .../ql/exec/vector/VectorizedBatchUtil.java     | 12 +++----
 .../hive/ql/metadata/CheckConstraint.java       |  2 --
 .../ql/optimizer/lineage/OpProcFactory.java     |  4 +--
 .../hive/ql/optimizer/physical/Vectorizer.java  |  1 -
 .../stats/annotation/StatsRulesProcFactory.java |  5 +--
 .../hadoop/hive/ql/parse/CalcitePlanner.java    |  4 +--
 .../repl/dump/events/AddPartitionHandler.java   |  4 +--
 .../repl/dump/events/CreateTableHandler.java    |  2 +-
 .../hadoop/hive/ql/plan/AggregationDesc.java    |  6 +---
 .../hive/ql/stats/TestStatsUpdaterThread.java   |  6 ++++
 .../hive/metastore/HiveMetaStoreClient.java     |  2 +-
 .../hadoop/hive/metastore/ObjectStore.java      |  4 +--
 .../hadoop/hive/metastore/txn/TxnHandler.java   |  2 +-
 16 files changed, 43 insertions(+), 49 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/eba9646b/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java b/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java
index ffdd340..4bc2431 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/cli/LlapServiceDriver.java
@@ -159,7 +159,7 @@ public class LlapServiceDriver {
         conf.set(key, (String) props.getValue());
       } else {
         if (key.startsWith(HiveConf.PREFIX_LLAP) || key.startsWith(HiveConf.PREFIX_HIVE_LLAP)) {
-          LOG.warn("Adding key [{}] even though it is not in the set of known llap-server keys");
+          LOG.warn("Adding key [{}] even though it is not in the set of known llap-server keys", key);
           conf.set(key, (String) props.getValue());
         } else {
           LOG.warn("Ignoring unknown llap server parameter: [{}]", key);

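This hunk, and the matching ones in ReplDumpTask, KillTriggerActionHandler, AddPartitionHandler, CreateTableHandler and TxnHandler below, fix the same lgtm.com alert: an SLF4J format string declares a {} placeholder but passes no argument, so the log line prints the placeholder literally instead of the intended value. A minimal sketch, assuming only slf4j-api on the classpath:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class PlaceholderSketch {
        private static final Logger LOG = LoggerFactory.getLogger(PlaceholderSketch.class);

        void warn(String key) {
            LOG.warn("Adding key [{}]");      // bug: logs the literal text "{}"
            LOG.warn("Adding key [{}]", key); // fix: placeholder bound to key
        }
    }
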
http://git-wip-us.apache.org/repos/asf/hive/blob/eba9646b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpTask.java
index 497e103..a5b944b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpTask.java
@@ -263,7 +263,7 @@ public class ReplDumpTask extends Task<ReplDumpWork> implements Serializable {
                 && TableType.EXTERNAL_TABLE.equals(tableTuple.object.getTableType())
                 && !conf.getBoolVar(HiveConf.ConfVars.REPL_DUMP_METADATA_ONLY);
             if (shouldWriteExternalTableLocationInfo) {
-              LOG.debug("adding table {} to external tables list");
+              LOG.debug("adding table {} to external tables list", tblName);
               writer.dataLocationDump(tableTuple.object);
             }
             dumpTable(dbName, tblName, validTxnList, dbRoot, bootDumpBeginReplId, hiveDb,

http://git-wip-us.apache.org/repos/asf/hive/blob/eba9646b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/KillTriggerActionHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/KillTriggerActionHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/KillTriggerActionHandler.java
index ee539ba..06e9ff6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/KillTriggerActionHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/KillTriggerActionHandler.java
@@ -37,25 +37,25 @@ public class KillTriggerActionHandler implements TriggerActionHandler<TezSession
   public void applyAction(final Map<TezSessionState, Trigger> queriesViolated) {
     for (Map.Entry<TezSessionState, Trigger> entry : queriesViolated.entrySet()) {
       switch (entry.getValue().getAction().getType()) {
-        case KILL_QUERY:
-          TezSessionState sessionState = entry.getKey();
-          String queryId = sessionState.getWmContext().getQueryId();
-          try {
-            SessionState ss = new SessionState(new HiveConf());
-            ss.setIsHiveServerQuery(true);
-            SessionState.start(ss);
-            KillQuery killQuery = sessionState.getKillQuery();
-            // if kill query is null then session might have been released to pool or closed already
-            if (killQuery != null) {
-              sessionState.getKillQuery().killQuery(queryId, entry.getValue().getViolationMsg(),
+      case KILL_QUERY:
+        TezSessionState sessionState = entry.getKey();
+        String queryId = sessionState.getWmContext().getQueryId();
+        try {
+          SessionState ss = new SessionState(new HiveConf());
+          ss.setIsHiveServerQuery(true);
+          SessionState.start(ss);
+          KillQuery killQuery = sessionState.getKillQuery();
+          // if kill query is null then session might have been released to pool or closed already
+          if (killQuery != null) {
+            sessionState.getKillQuery().killQuery(queryId, entry.getValue().getViolationMsg(),
                       sessionState.getConf());
-            }
-          } catch (HiveException e) {
-            LOG.warn("Unable to kill query {} for trigger violation");
           }
-          break;
-        default:
-          throw new RuntimeException("Unsupported action: " + entry.getValue());
+        } catch (HiveException e) {
+          LOG.warn("Unable to kill query {} for trigger violation", queryId);
+        }
+        break;
+      default:
+        throw new RuntimeException("Unsupported action: " + entry.getValue());
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/eba9646b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
index 211f452..617cbf1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
@@ -576,7 +576,12 @@ public class VectorizedBatchUtil {
 
   public static ColumnVector makeLikeColumnVector(ColumnVector source
                                         ) throws HiveException{
-    if (source instanceof LongColumnVector) {
+    if (source instanceof Decimal64ColumnVector) {
+      Decimal64ColumnVector dec64ColVector = (Decimal64ColumnVector) source;
+      return new DecimalColumnVector(dec64ColVector.vector.length,
+          dec64ColVector.precision,
+          dec64ColVector.scale);
+    } else if (source instanceof LongColumnVector) {
       return new LongColumnVector(((LongColumnVector) source).vector.length);
     } else if (source instanceof DoubleColumnVector) {
       return new DoubleColumnVector(((DoubleColumnVector) source).vector.length);
@@ -587,11 +592,6 @@ public class VectorizedBatchUtil {
       return new DecimalColumnVector(decColVector.vector.length,
           decColVector.precision,
           decColVector.scale);
-    } else if (source instanceof Decimal64ColumnVector) {
-        Decimal64ColumnVector dec64ColVector = (Decimal64ColumnVector) source;
-        return new DecimalColumnVector(dec64ColVector.vector.length,
-            dec64ColVector.precision,
-            dec64ColVector.scale);
     } else if (source instanceof TimestampColumnVector) {
       return new TimestampColumnVector(((TimestampColumnVector) source).getLength());
     } else if (source instanceof IntervalDayTimeColumnVector) {

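The reorder matters because Decimal64ColumnVector is a subclass of LongColumnVector, so the LongColumnVector branch used to capture decimal64 inputs first and the copy dropped precision and scale; testing the subclass before the superclass restores the intended branch. The general pitfall, with hypothetical types:

    class InstanceofOrderSketch {
        static class Base { }
        static class Derived extends Base { }

        static String classify(Base value) {
            if (value instanceof Derived) {    // subclass first, or this branch is dead
                return "derived";
            } else if (value instanceof Base) {
                return "base";
            }
            return "null";                     // instanceof is false for null
        }
    }
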
http://git-wip-us.apache.org/repos/asf/hive/blob/eba9646b/ql/src/java/org/apache/hadoop/hive/ql/metadata/CheckConstraint.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/CheckConstraint.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/CheckConstraint.java
index db11310..af45788 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/CheckConstraint.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/CheckConstraint.java
@@ -56,8 +56,6 @@ public class CheckConstraint implements Serializable {
   public CheckConstraint() {}
 
   public CheckConstraint(List<SQLCheckConstraint> checkConstraintsList) {
-    this.tableName = tableName;
-    this.databaseName = databaseName;
     checkConstraints = new TreeMap<String, List<CheckConstraintCol>>();
     checkExpressionList = new ArrayList<>();
     if (checkConstraintsList == null) {

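The two deleted statements are no-ops flagged by lgtm: the constructor takes no tableName or databaseName parameter, so each line assigns a field to itself (the Vectorizer hunk below removes the same pattern for a method parameter). A sketch of the alert, with a hypothetical class:

    class SelfAssignSketch {
        private String tableName;

        SelfAssignSketch() {
            this.tableName = tableName;   // no-op: reads the very field it writes
        }
    }
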
http://git-wip-us.apache.org/repos/asf/hive/blob/eba9646b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/OpProcFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/OpProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/OpProcFactory.java
index 2360032..825ece6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/OpProcFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/OpProcFactory.java
@@ -271,9 +271,9 @@ public class OpProcFactory {
         }
         int left = conds[i].getLeft();
         int right = conds[i].getRight();
-        if (joinKeys.length < left
+        if (joinKeys.length <= left
             || joinKeys[left].length == 0
-            || joinKeys.length < right
+            || joinKeys.length <= right
             || joinKeys[right].length == 0
             || parents < left
             || parents < right) {

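Valid indexes into joinKeys run from 0 to length - 1, so the old test (joinKeys.length < left) wrongly accepted left == joinKeys.length, and the subsequent joinKeys[left] access could then go out of bounds; <= closes the gap. The check in isolation:

    class BoundsSketch {
        // true when i is NOT a valid index into arr
        static boolean outOfBounds(Object[] arr, int i) {
            return i < 0 || arr.length <= i;  // 'arr.length < i' would accept i == arr.length
        }
    }
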
http://git-wip-us.apache.org/repos/asf/hive/blob/eba9646b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
index 5023f2f..1fe0a79 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
@@ -2404,7 +2404,6 @@ public class Vectorizer implements PhysicalPlanResolver {
   @Override
   public PhysicalContext resolve(PhysicalContext physicalContext) throws SemanticException {
 
-    physicalContext = physicalContext;
     hiveConf = physicalContext.getConf();
     planMapper = physicalContext.getContext().getPlanMapper();
 

http://git-wip-us.apache.org/repos/asf/hive/blob/eba9646b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java
index 1bd2511..b19c7de 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java
@@ -31,7 +31,6 @@ import java.util.Optional;
 import java.util.Set;
 import java.util.Stack;
 
-import org.apache.commons.collections.CollectionUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.ql.Context;
@@ -91,7 +90,6 @@ import org.apache.hadoop.hive.ql.udf.generic.GenericUDFIn;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFInBloomFilter;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualNS;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrGreaterThan;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrLessThan;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPGreaterThan;
@@ -1095,8 +1093,7 @@ public class StatsRulesProcFactory {
         ExprNodeGenericFuncDesc genFunc = (ExprNodeGenericFuncDesc) child;
         GenericUDF udf = genFunc.getGenericUDF();
 
-        if (udf instanceof GenericUDFOPEqual ||
-            udf instanceof GenericUDFOPEqualNS) {
+        if (udf instanceof GenericUDFOPEqual) {
           String colName = null;
           boolean isConst = false;
           Object prevConst = null;

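This hunk and the CalcitePlanner and AggregationDesc hunks below clear the same lgtm alert: an instanceof test subsumed by the one next to it, because one type is a subtype of the other (GenericUDFOPEqualNS extends GenericUDFOPEqual, UndeclaredThrowableException extends RuntimeException, Externalizable extends Serializable). A sketch using the JDK pair:

    import java.io.Externalizable;
    import java.io.Serializable;

    class SubsumedCheckSketch {
        static boolean before(Object o) {
            // the second test can never change the result: every
            // Externalizable is already a Serializable
            return o instanceof Serializable || o instanceof Externalizable;
        }

        static boolean after(Object o) {
            return o instanceof Serializable;
        }
    }
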
http://git-wip-us.apache.org/repos/asf/hive/blob/eba9646b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
index aacf9e3..646ce09 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
@@ -281,7 +281,6 @@ import org.joda.time.Interval;
 import java.io.IOException;
 import java.lang.reflect.Field;
 import java.lang.reflect.InvocationTargetException;
-import java.lang.reflect.UndeclaredThrowableException;
 import java.math.BigDecimal;
 import java.util.AbstractMap.SimpleEntry;
 import java.util.ArrayDeque;
@@ -1655,8 +1654,7 @@ public class CalcitePlanner extends SemanticAnalyzer {
   }
 
   private boolean isUselessCause(Throwable t) {
-    return t instanceof RuntimeException || t instanceof InvocationTargetException
-        || t instanceof UndeclaredThrowableException;
+    return t instanceof RuntimeException || t instanceof InvocationTargetException;
   }
 
   private RowResolver genRowResolver(Operator op, QB qb) {

http://git-wip-us.apache.org/repos/asf/hive/blob/eba9646b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AddPartitionHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AddPartitionHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AddPartitionHandler.java
index 5c16887..0b7f910 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AddPartitionHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AddPartitionHandler.java
@@ -54,7 +54,7 @@ class AddPartitionHandler extends AbstractEventHandler {
     AddPartitionMessage apm = (AddPartitionMessage) eventMessage;
     org.apache.hadoop.hive.metastore.api.Table tobj = apm.getTableObj();
     if (tobj == null) {
-      LOG.debug("Event#{} was a ADD_PTN_EVENT with no table listed");
+      LOG.debug("Event#{} was a ADD_PTN_EVENT with no table listed", fromEventId());
       return;
     }
 
@@ -65,7 +65,7 @@ class AddPartitionHandler extends AbstractEventHandler {
 
     Iterable<org.apache.hadoop.hive.metastore.api.Partition> ptns = apm.getPartitionObjs();
     if ((ptns == null) || (!ptns.iterator().hasNext())) {
-      LOG.debug("Event#{} was an ADD_PTN_EVENT with no partitions");
+      LOG.debug("Event#{} was an ADD_PTN_EVENT with no partitions", fromEventId());
       return;
     }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/eba9646b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/CreateTableHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/CreateTableHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/CreateTableHandler.java
index 550a82d..5870876 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/CreateTableHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/CreateTableHandler.java
@@ -47,7 +47,7 @@ class CreateTableHandler extends AbstractEventHandler<CreateTableMessage> {
     org.apache.hadoop.hive.metastore.api.Table tobj = eventMessage.getTableObj();
 
     if (tobj == null) {
-      LOG.debug("Event#{} was a CREATE_TABLE_EVENT with no table listed");
+      LOG.debug("Event#{} was a CREATE_TABLE_EVENT with no table listed", fromEventId());
       return;
     }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/eba9646b/ql/src/java/org/apache/hadoop/hive/ql/plan/AggregationDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/AggregationDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/AggregationDesc.java
index 4b2ef1f..87a7f4d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/AggregationDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/AggregationDesc.java
@@ -18,11 +18,8 @@
 
 package org.apache.hadoop.hive.ql.plan;
 
-import java.io.Externalizable;
 import java.io.Serializable;
 
-import org.apache.hadoop.hive.common.JavaUtils;
-import org.apache.hadoop.hive.ql.exec.PTFUtils;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
 import org.apache.hadoop.util.ReflectionUtils;
@@ -72,8 +69,7 @@ public class AggregationDesc implements java.io.Serializable {
   public void setGenericUDAFEvaluator(
       final GenericUDAFEvaluator genericUDAFEvaluator) {
     this.genericUDAFEvaluator = genericUDAFEvaluator;
-    if (genericUDAFEvaluator instanceof Serializable ||
-        genericUDAFEvaluator instanceof Externalizable) {
+    if (genericUDAFEvaluator instanceof Serializable) {
       this.genericUDAFWritableEvaluator = genericUDAFEvaluator;
     } else {
       this.genericUDAFEvaluatorClassName = genericUDAFEvaluator.getClass().getName();

http://git-wip-us.apache.org/repos/asf/hive/blob/eba9646b/ql/src/test/org/apache/hadoop/hive/ql/stats/TestStatsUpdaterThread.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/stats/TestStatsUpdaterThread.java b/ql/src/test/org/apache/hadoop/hive/ql/stats/TestStatsUpdaterThread.java
index 2512579..24acd6d 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/stats/TestStatsUpdaterThread.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/stats/TestStatsUpdaterThread.java
@@ -274,6 +274,12 @@ public class TestStatsUpdaterThread {
         Lists.newArrayList("s"), currentWriteIds);
     assertEquals(1, stats.size());
 
+    // Test with null list of partNames
+    stats = msClient.getPartitionColumnStatistics(
+        dbName, tblName, null,
+        Lists.newArrayList("s"), currentWriteIds);
+    assertEquals(0, stats.size());
+
     // New reader.
     currentWriteIds = msClient.getValidWriteIds(fqName).toString();
     stats = msClient.getPartitionColumnStatistics(

http://git-wip-us.apache.org/repos/asf/hive/blob/eba9646b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
index 30edc56..3d4467d 100644
--- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
+++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
@@ -924,7 +924,7 @@ public class HiveMetaStoreClient implements IMetaStoreClient, AutoCloseable {
       List<String> colNames, String validWriteIdList)
       throws NoSuchObjectException, MetaException, TException {
     PartitionsStatsRequest rqst = new PartitionsStatsRequest(dbName, tableName, colNames,
-        partNames);
+        partNames == null ? new ArrayList<String>() : partNames);
     rqst.setCatName(catName);
     rqst.setValidWriteIdList(validWriteIdList);
     return client.get_partitions_statistics_req(rqst).getPartStats();

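Substituting an empty list keeps a null partNames from reaching the Thrift request object (Thrift struct fields generally must not be null when serialized), which is what the new TestStatsUpdaterThread case above exercises: a null list now yields an empty result rather than a failure. The defaulting idiom in isolation:

    import java.util.ArrayList;
    import java.util.List;

    class DefaultingSketch {
        static List<String> orEmpty(List<String> partNames) {
            // hand an empty list downstream rather than null
            return partNames == null ? new ArrayList<String>() : partNames;
        }
    }
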
http://git-wip-us.apache.org/repos/asf/hive/blob/eba9646b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index ad60af7..65fea87 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -8810,7 +8810,7 @@ public class ObjectStore implements RawStore, Configurable {
       List<String> partNames, List<String> colNames,
       String writeIdList)
       throws MetaException, NoSuchObjectException {
-    if (partNames == null && partNames.isEmpty()) {
+    if (partNames == null || partNames.isEmpty()) {
       return null;
     }
     List<ColumnStatistics> allStats = getPartitionColumnStatisticsInternal(
@@ -8897,7 +8897,7 @@ public class ObjectStore implements RawStore, Configurable {
     // If the current stats in the metastore doesn't comply with
     // the isolation level of the query, return null.
     if (writeIdList != null) {
-      if (partNames == null && partNames.isEmpty()) {
+      if (partNames == null || partNames.isEmpty()) {
         return null;
       }
 

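The original condition (partNames == null && partNames.isEmpty()) is doubly wrong: a null list makes the right operand dereference null and throw NullPointerException, while an empty list makes the left operand false so the early return never fires. With ||, short-circuit evaluation guarantees isEmpty() runs only on a non-null list:

    import java.util.List;

    class NullOrEmptySketch {
        static boolean isNullOrEmpty(List<String> partNames) {
            // '||' short-circuits: isEmpty() is evaluated only when partNames != null
            return partNames == null || partNames.isEmpty();
        }
    }
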
http://git-wip-us.apache.org/repos/asf/hive/blob/eba9646b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
index 91a9ab4..a17abfc 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
@@ -2028,7 +2028,7 @@ abstract class TxnHandler implements TxnStore, TxnStore.MutexAPI {
         ValidWriteIdList tblValidWriteIdList =
             validReaderWriteIdList.getTableValidWriteIdList(fullyQualifiedName);
         if (tblValidWriteIdList == null) {
-          LOG.warn("ValidWriteIdList for table {} not present in creation metadata, this should not happen");
+          LOG.warn("ValidWriteIdList for table {} not present in creation metadata, this should not happen", fullyQualifiedName);
           return null;
         }
         query.append(" AND (ctc_writeid > " + tblValidWriteIdList.getHighWatermark());