Posted to commits@hive.apache.org by na...@apache.org on 2010/04/27 01:16:54 UTC

svn commit: r938267 - in /hadoop/hive/trunk: ./ ql/src/java/org/apache/hadoop/hive/ql/parse/ ql/src/test/queries/clientnegative/ ql/src/test/results/clientnegative/ ql/src/test/results/compiler/errors/

Author: namit
Date: Mon Apr 26 23:16:54 2010
New Revision: 938267

URL: http://svn.apache.org/viewvc?rev=938267&view=rev
Log:
HIVE-1325. dynamic partition insert should throw an exception if the number of target table
columns + dynamic partition columns does not equal the number of select columns
(Ning Zhang via namit)
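
To illustrate the check (a sketch based on the new dyn_part2.q test below; the statements here are illustrative and not part of the patch): nzhang_part1 has two table columns (key, value) and, with ds given statically and hr left dynamic, one dynamic partition column, so the select list must supply 2 + 1 = 3 columns.

    set hive.exec.dynamic.partition=true;

    -- rejected after this change: 3 columns expected (key, value, hr) but only 2 selected
    insert overwrite table nzhang_part1 partition(ds='11', hr)
    select key, value from srcpart where ds is not null;

    -- accepted: the dynamic partition column hr is supplied as the last select column
    insert overwrite table nzhang_part1 partition(ds='11', hr)
    select key, value, hr from srcpart where ds is not null;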


Added:
    hadoop/hive/trunk/ql/src/test/queries/clientnegative/dyn_part2.q
    hadoop/hive/trunk/ql/src/test/results/clientnegative/dyn_part2.q.out
Modified:
    hadoop/hive/trunk/CHANGES.txt
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hadoop/hive/trunk/ql/src/test/results/compiler/errors/insert_wrong_number_columns.q.out

Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=938267&r1=938266&r2=938267&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Mon Apr 26 23:16:54 2010
@@ -393,6 +393,10 @@ Trunk -  Unreleased
     HIVE-1321. bugs with temp directories, trailing blank fields in HBase bulk load
     (John Sichi via namit)
 
+    HIVE-1325. dynamic partition insert should throw an exception if the number of target table
+    columns + dynamic partition columns does not equal the number of select columns
+    (Ning Zhang via namit)
+
 Release 0.5.0 -  Unreleased
 
   INCOMPATIBLE CHANGES

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=938267&r1=938266&r2=938267&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Mon Apr 26 23:16:54 2010
@@ -3469,17 +3469,20 @@ public class SemanticAnalyzer extends Ba
     boolean dynPart = HiveConf.getBoolVar(conf, HiveConf.ConfVars.DYNAMICPARTITIONING);
     ArrayList<ColumnInfo> rowFields = opParseCtx.get(input).getRR()
         .getColumnInfos();
-    if (tableFields.size() != rowFields.size()) {
-      if (!dynPart || dpCtx == null ||
-          tableFields.size() + dpCtx.getNumDPCols() != rowFields.size()) {
-        String reason = "Table " + dest + " has " + tableFields.size()
-       	   + " columns but query has " + rowFields.size() + " columns.";
-      	throw new SemanticException(ErrorMsg.TARGET_TABLE_COLUMN_MISMATCH.getMsg(
-       	   qb.getParseInfo().getDestForClause(dest), reason));
-      } else {
-        // create the mapping from input ExprNode to dest table DP column
-        dpCtx.mapInputToDP(rowFields.subList(tableFields.size(), rowFields.size()));
-      }
+    int inColumnCnt = rowFields.size();
+    int outColumnCnt = tableFields.size();
+    if (dynPart && dpCtx != null) {
+        outColumnCnt += dpCtx.getNumDPCols();
+    }
+
+    if (inColumnCnt != outColumnCnt) {
+      String reason = "Table " + dest + " has " + outColumnCnt
+          + " columns, but query has " + inColumnCnt + " columns.";
+      throw new SemanticException(ErrorMsg.TARGET_TABLE_COLUMN_MISMATCH.getMsg(
+          qb.getParseInfo().getDestForClause(dest), reason));
+    } else if (dynPart && dpCtx != null){
+      // create the mapping from input ExprNode to dest table DP column
+      dpCtx.mapInputToDP(rowFields.subList(tableFields.size(), rowFields.size()));
     }
 
     // Check column types

Added: hadoop/hive/trunk/ql/src/test/queries/clientnegative/dyn_part2.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientnegative/dyn_part2.q?rev=938267&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientnegative/dyn_part2.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientnegative/dyn_part2.q Mon Apr 26 23:16:54 2010
@@ -0,0 +1,11 @@
+drop table nzhang_part1;
+create table nzhang_part1 (key string, value string) partitioned by (ds string, hr string);
+
+set hive.exec.dynamic.partition=true;
+
+insert overwrite table nzhang_part1 partition(ds='11', hr) select key, value from srcpart where ds is not null;
+
+show partitions nzhang_part1;
+
+drop table nzhang_part1;
+

Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/dyn_part2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/dyn_part2.q.out?rev=938267&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/dyn_part2.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/dyn_part2.q.out Mon Apr 26 23:16:54 2010
@@ -0,0 +1,10 @@
+PREHOOK: query: drop table nzhang_part1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table nzhang_part1
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table nzhang_part1 (key string, value string) partitioned by (ds string, hr string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table nzhang_part1 (key string, value string) partitioned by (ds string, hr string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@nzhang_part1
+FAILED: Error in semantic analysis: line 3:23 Cannot insert into target table because column number/types are different hr: Table insclause-0 has 3 columns, but query has 2 columns.

Modified: hadoop/hive/trunk/ql/src/test/results/compiler/errors/insert_wrong_number_columns.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/compiler/errors/insert_wrong_number_columns.q.out?rev=938267&r1=938266&r2=938267&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/compiler/errors/insert_wrong_number_columns.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/compiler/errors/insert_wrong_number_columns.q.out Mon Apr 26 23:16:54 2010
@@ -1,2 +1,2 @@
 Semantic Exception: 
-line 2:23 Cannot insert into target table because column number/types are different dest1: Table insclause-0 has 2 columns but query has 3 columns.
\ No newline at end of file
+line 2:23 Cannot insert into target table because column number/types are different dest1: Table insclause-0 has 2 columns, but query has 3 columns.
\ No newline at end of file