Posted to commits@hive.apache.org by na...@apache.org on 2011/02/23 15:57:02 UTC

svn commit: r1073759 - in /hive/trunk/ql/src: java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java test/queries/clientnegative/dyn_part_merge.q test/results/clientnegative/dyn_part_merge.q.out

Author: namit
Date: Wed Feb 23 14:57:02 2011
New Revision: 1073759

URL: http://svn.apache.org/viewvc?rev=1073759&view=rev
Log:
HIVE-1980 Merging using a map-reduce rather than a map-only job failed in the case of dynamic partition inserts (Ning Zhang via namit)
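
For users who hit the compile-time error this patch introduces, both the old and the new
error message point at the same two ways around it. A minimal sketch of the relevant
client settings, assuming the hive.merge.* properties named in the old message and that
the combine input format lives at org.apache.hadoop.hive.ql.io.CombineHiveInputFormat
(verify the class path for your Hive version):

  -- Option 1: keep the merge stage map-only; requires a Hadoop version that
  -- supports CombineFileInputFormat
  set hive.input.format=org.apache.hadoop.hive.ql.io.CombineHiveInputFormat;

  -- Option 2: skip the small-file merge for this insert entirely
  set hive.merge.mapfiles=false;
  set hive.merge.mapredfiles=false;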

Added:
    hive/trunk/ql/src/test/queries/clientnegative/dyn_part_merge.q
    hive/trunk/ql/src/test/results/clientnegative/dyn_part_merge.q.out
Modified:
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java?rev=1073759&r1=1073758&r2=1073759&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java Wed Feb 23 14:57:02 2011
@@ -48,6 +48,7 @@ import org.apache.hadoop.hive.ql.lib.Nod
 import org.apache.hadoop.hive.ql.lib.NodeProcessor;
 import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
 import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMRMapJoinCtx;
+import org.apache.hadoop.hive.ql.parse.ErrorMsg;
 import org.apache.hadoop.hive.ql.parse.ParseContext;
 import org.apache.hadoop.hive.ql.parse.RowResolver;
 import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
@@ -277,6 +278,9 @@ public class GenMRFileSink1 implements N
       createMap4Merge(fsOp, ctx, finalName);
       LOG.info("use CombineHiveInputformat for the merge job");
     } else {
+      if (fsOp.getConf().getDynPartCtx() != null) {
+        throw new SemanticException(ErrorMsg.DYNAMIC_PARTITION_MERGE.getMsg());
+      }
       createMapReduce4Merge(fsOp, ctx, finalName);
       LOG.info("use HiveInputFormat for the merge job");
     }
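
Judging from the settings used by the new dyn_part_merge.q test below, the else branch
(the full map-reduce merge via createMapReduce4Merge) is reached with roughly the
following client configuration; a sketch for orientation only, not part of the patch:

  set hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat;
  set hive.mergejob.maponly=false;

When the FileSinkOperator also carries a dynamic partition context
(getDynPartCtx() != null), compilation now stops here with DYNAMIC_PARTITION_MERGE
instead of launching a merge job that would fail at runtime.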

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java?rev=1073759&r1=1073758&r2=1073759&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java Wed Feb 23 14:57:02 2011
@@ -143,9 +143,9 @@ public enum ErrorMsg {
       + "hive.exec.dynamic.partition=true or specify partition column values"),
   DYNAMIC_PARTITION_STRICT_MODE("Dynamic partition strict mode requires at least one "
       + "static partition column. To turn this off set hive.exec.dynamic.partition.mode=nonstrict"),
-  DYNAMIC_PARTITION_MERGE("Dynamic partition does not support merging mapfiles/mapredfiles yet."
-      + "Please set hive.merge.mapfiles and hive.merge.mapredfiles to false or use static "
-      +	"partitions"),
+  DYNAMIC_PARTITION_MERGE("Dynamic partition does not support merging using non-CombineHiveInputFormat. "
+      + "Please check your hive.input.format setting and make sure your Hadoop version supports "
+      + "CombineFileInputFormat."),
   NONEXISTPARTCOL("Non-Partition column appears in the partition specification: "),
   UNSUPPORTED_TYPE("DATE, DATETIME, and TIMESTAMP types aren't supported yet. Please use "
       + "STRING instead."),

Added: hive/trunk/ql/src/test/queries/clientnegative/dyn_part_merge.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/dyn_part_merge.q?rev=1073759&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientnegative/dyn_part_merge.q (added)
+++ hive/trunk/ql/src/test/queries/clientnegative/dyn_part_merge.q Wed Feb 23 14:57:02 2011
@@ -0,0 +1,9 @@
+set hive.exec.dynamic.partition=true;
+set hive.exec.dynamic.partition.mode=nonstrict;
+set hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat;
+set hive.mergejob.maponly=false;
+set hive.merge.mapfiles=true;
+
+create table dyn_merge(key string, value string) partitioned by (ds string);
+
+insert overwrite table dyn_merge partition(ds) select key, value, ds from srcpart where ds is not null;

Added: hive/trunk/ql/src/test/results/clientnegative/dyn_part_merge.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/dyn_part_merge.q.out?rev=1073759&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/dyn_part_merge.q.out (added)
+++ hive/trunk/ql/src/test/results/clientnegative/dyn_part_merge.q.out Wed Feb 23 14:57:02 2011
@@ -0,0 +1,6 @@
+PREHOOK: query: create table dyn_merge(key string, value string) partitioned by (ds string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table dyn_merge(key string, value string) partitioned by (ds string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dyn_merge
+FAILED: Error in semantic analysis: Dynamic partition does not support merging using non-CombineHiveInputFormat. Please check your hive.input.format setting and make sure your Hadoop version supports CombineFileInputFormat.
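
For comparison, a positive counterpart to the negative test above: the same dynamic
partition insert is expected to compile and take the map-only merge path ("use
CombineHiveInputformat for the merge job") when the input format is left at the combine
variant. A sketch only, not part of this commit, assuming the class path
org.apache.hadoop.hive.ql.io.CombineHiveInputFormat:

  set hive.exec.dynamic.partition=true;
  set hive.exec.dynamic.partition.mode=nonstrict;
  set hive.input.format=org.apache.hadoop.hive.ql.io.CombineHiveInputFormat;
  set hive.merge.mapfiles=true;

  insert overwrite table dyn_merge partition(ds)
  select key, value, ds from srcpart where ds is not null;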