You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by se...@apache.org on 2016/09/08 01:52:41 UTC

[21/38] hive git commit: HIVE-14570 : Create table with column names ROW__ID, INPUT__FILE__NAME, BLOCK__OFFSET__INSIDE__FILE succeeds but query fails (Niklaus Xiao via Ashutosh Chauhan)

HIVE-14570 : Create table with column names ROW__ID, INPUT__FILE__NAME, BLOCK__OFFSET__INSIDE__FILE succeeds but query fails (Niklaus Xiao via Ashutosh Chauhan)

Signed-off-by: Ashutosh Chauhan <ha...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/83752a6b
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/83752a6b
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/83752a6b

Branch: refs/heads/hive-14535
Commit: 83752a6bd7308b15398caf9743cf3d800781dac9
Parents: 4cc783e
Author: niklaus xiao <st...@live.cn>
Authored: Thu Aug 18 04:11:00 2016 -0800
Committer: Ashutosh Chauhan <ha...@apache.org>
Committed: Tue Sep 6 10:25:56 2016 -0700

----------------------------------------------------------------------
 ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java  |  1 +
 .../hadoop/hive/ql/parse/BaseSemanticAnalyzer.java   | 15 ++++++++++-----
 2 files changed, 11 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/83752a6b/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java b/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
index 602b4fc..001f852 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
@@ -449,6 +449,7 @@ public enum ErrorMsg {
   INVALID_PK_SYNTAX(10326, "Invalid Primary Key syntax"),
   ACID_NOT_ENOUGH_HISTORY(10327, "Not enough history available for ({0},{1}).  " +
     "Oldest available base: {2}", true),
+  INVALID_COLUMN_NAME(10328, "Invalid column name"),
   //========================== 20000 range starts here ========================//
   SCRIPT_INIT_ERROR(20000, "Unable to initialize custom script."),
   SCRIPT_IO_ERROR(20001, "An error occurred while reading or writing to your custom script. "

http://git-wip-us.apache.org/repos/asf/hive/blob/83752a6b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
index db7aeef..e0e9b12 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
@@ -58,11 +58,7 @@ import org.apache.hadoop.hive.ql.hooks.LineageInfo;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.lib.Node;
-import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.metadata.InvalidTableException;
-import org.apache.hadoop.hive.ql.metadata.Partition;
-import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.metadata.*;
 import org.apache.hadoop.hive.ql.optimizer.listbucketingpruner.ListBucketingPrunerUtils;
 import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
@@ -670,6 +666,7 @@ public abstract class BaseSemanticAnalyzer {
        throw new SemanticException(
          ErrorMsg.INVALID_PK_SYNTAX.getMsg(" VALIDATE feature not supported yet"));
      }
+      checkColumnName(grandChild.getText());
      pkInfos.add(
        new PKInfo(
          unescapeIdentifier(grandChild.getText().toLowerCase()),
@@ -783,6 +780,7 @@ public abstract class BaseSemanticAnalyzer {
     for (int j = 0; j < child.getChild(fkIndex).getChildCount(); j++) {
       SQLForeignKey sqlForeignKey = new SQLForeignKey();
       Tree fkgrandChild = child.getChild(fkIndex).getChild(j);
+      checkColumnName(fkgrandChild.getText());
       boolean rely = child.getChild(relyIndex).getType() == HiveParser.TOK_VALIDATE;
       boolean enable =  child.getChild(relyIndex+1).getType() == HiveParser.TOK_ENABLE;
       boolean validate =  child.getChild(relyIndex+2).getType() == HiveParser.TOK_VALIDATE;
@@ -810,6 +808,12 @@ public abstract class BaseSemanticAnalyzer {
     }
   }
 
+  private static void checkColumnName(String columnName) throws SemanticException {
+    if (VirtualColumn.VIRTUAL_COLUMN_NAMES.contains(columnName.toUpperCase())) {
+      throw new SemanticException(ErrorMsg.INVALID_COLUMN_NAME.getMsg(columnName));
+    }
+  }
+
   /**
    * Get the list of FieldSchema out of the ASTNode.
    * Additionally, populate the primaryKeys and foreignKeys if any.
@@ -837,6 +841,7 @@ public abstract class BaseSemanticAnalyzer {
           if(lowerCase) {
             name = name.toLowerCase();
           }
+          checkColumnName(name);
           // child 0 is the name of the column
           col.setName(unescapeIdentifier(name));
           // child 1 is the type of the column