Posted to commits@hive.apache.org by zs...@apache.org on 2009/04/10 08:35:03 UTC

svn commit: r763879 [1/8] - in /hadoop/hive/trunk: ./ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hive/ql/io/ ql/src/java/org/apache/hadoop/hive/ql/metadata/ ql/src/java/org/apache/hadoop/hive/ql/parse/ ql/src/java/org/apa...

Author: zshao
Date: Fri Apr 10 06:34:57 2009
New Revision: 763879

URL: http://svn.apache.org/viewvc?rev=763879&view=rev
Log:
HIVE-360. Generalize the FileFormat Interface in Hive. (He Yongqiang via zshao)

Added:
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveIgnoreKeyTextOutputFormat.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveOutputFormat.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveSequenceFileOutputFormat.java
    hadoop/hive/trunk/ql/src/test/queries/clientnegative/create_insert_outputformat.q
    hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_insert_outputformat.q
    hadoop/hive/trunk/ql/src/test/results/clientnegative/create_insert_outputformat.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/create_insert_outputformat.q.out
Removed:
    hadoop/hive/trunk/ql/src/test/queries/clientpositive/fileformat_void.q
    hadoop/hive/trunk/ql/src/test/results/clientpositive/fileformat_void.q.out
Modified:
    hadoop/hive/trunk/CHANGES.txt
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/IgnoreKeyTextOutputFormat.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/tableDesc.java
    hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java
    hadoop/hive/trunk/ql/src/test/results/clientnegative/fileformat_bad_class.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/fileformat_void_output.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/script_error.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/binarysortable_1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/case_sensitivity.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/cast1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/cluster.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_limit.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map_nomap.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map_skew.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_noskew.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_limit.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_map.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_map_skew.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_noskew.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3_map.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3_map_skew.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3_noskew.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby4_map.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby4_map_skew.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby4_noskew.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5_map.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5_map_skew.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5_noskew.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby6.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby6_map.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby6_map_skew.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby6_noskew.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby7_map.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby7_map_skew.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby7_noskew.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby8.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby8_map.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby8_map_skew.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby8_noskew.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/implicit_cast1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input11.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input11_limit.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input12.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input13.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input14.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input14_limit.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input17.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input18.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input1_limit.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input20.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input21.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input2_limit.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input3_limit.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input5.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input6.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input7.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input8.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input9.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input_dynamicserde.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input_lazyserde.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part5.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part6.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part7.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input_testsequencefile.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input_testxpath.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input_testxpath2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input_testxpath3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join0.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join10.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join11.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join12.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join13.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join14.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join15.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join16.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join17.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join18.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join19.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join20.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join21.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join22.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join5.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join6.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join7.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join8.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join9.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce5.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce6.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce7.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce8.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/noalias_subq1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/notable_alias1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/notable_alias2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/nullgroup.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/nullgroup2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/nullgroup3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/nullgroup4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_clusterby.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_constant_expr.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_gby.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_gby2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_gby_join.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_join.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_join2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_join3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_multi_insert.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_outer_join1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_outer_join2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_outer_join3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_outer_join4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_random.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_transform.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_union.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/quote1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/rand_partitionpruner1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/rand_partitionpruner2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/rand_partitionpruner3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/sample1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/sample2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/sample3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/sample4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/sample5.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/sample6.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/sample7.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/sample8.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/sort.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/subq.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/subq2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf5.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf6.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf7.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf8.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf9.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_json.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/union.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/union10.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/union11.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/union12.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/union13.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/union14.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/union15.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/union2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/union3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/union4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/union5.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/union6.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/union7.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/union8.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/union9.q.out
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/case_sensitivity.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/cast1.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby1.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby2.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby3.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby4.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby5.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby6.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/input1.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/input2.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/input20.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/input3.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/input4.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/input5.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/input6.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/input7.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/input8.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/input9.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/input_part1.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/input_testsequencefile.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/input_testxpath.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/input_testxpath2.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/join1.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/join2.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/join3.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/join4.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/join5.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/join6.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/join7.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/join8.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample1.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample2.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample3.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample4.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample5.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample6.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample7.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/subq.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/udf1.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/udf4.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/udf6.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/union.q.xml

Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=763879&r1=763878&r2=763879&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Fri Apr 10 06:34:57 2009
@@ -9,6 +9,9 @@
   IMPROVEMENTS
     HIVE-389. Option to build without ivy (jssarma)
 
+    HIVE-360. Generalize the FileFormat Interface in Hive.
+    (He Yongqiang via zshao)
+
   OPTIMIZATIONS
 
     HIVE-279. Predicate Pushdown support (Prasad Chakka via athusoo).

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java?rev=763879&r1=763878&r2=763879&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java Fri Apr 10 06:34:57 2009
@@ -20,6 +20,7 @@
 
 import java.io.*;
 import java.lang.reflect.Method;
+import java.util.Properties;
 
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.mapred.*;
@@ -28,7 +29,10 @@
 
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.fileSinkDesc;
+import org.apache.hadoop.hive.ql.plan.tableDesc;
 import org.apache.hadoop.hive.ql.exec.FilterOperator.Counter;
+import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
+import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
 import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
 import org.apache.hadoop.hive.serde.Constants;
 import org.apache.hadoop.hive.serde2.SerDeException;
@@ -120,58 +124,19 @@
 
       LOG.info("Writing to temp file: " + outPath);
 
-      OutputFormat<?, ?> outputFormat = conf.getTableInfo().getOutputFileFormatClass().newInstance();
+      HiveOutputFormat<?, ?> hiveOutputFormat = conf.getTableInfo().getOutputFileFormatClass().newInstance();
       final Class<? extends Writable> outputClass = serializer.getSerializedClass();
       boolean isCompressed = conf.getCompressed();
 
-      // The reason to keep these instead of using OutputFormat.getRecordWriter() is that
-      // getRecordWriter does not give us enough control over the file name that we create.
-      if(outputFormat instanceof IgnoreKeyTextOutputFormat) {
-        finalPath = new Path(Utilities.toTempPath(conf.getDirName()), Utilities.getTaskId(hconf) +
-                             Utilities.getFileExtension(jc, isCompressed));
+      // The reason to keep these instead of using
+      // OutputFormat.getRecordWriter() is that
+      // getRecordWriter does not give us enough control over the file name that
+      // we create.
+      Path parent = Utilities.toTempPath(conf.getDirName());
+      finalPath = HiveFileFormatUtils.getOutputFormatFinalPath(parent, jc, hiveOutputFormat, isCompressed, finalPath);
+      tableDesc tableInfo = conf.getTableInfo();
 
-        String rowSeparatorString = conf.getTableInfo().getProperties().getProperty(Constants.LINE_DELIM, "\n");
-        int rowSeparator = 0;
-        try {
-          rowSeparator = Byte.parseByte(rowSeparatorString); 
-        } catch (NumberFormatException e) {
-          rowSeparator = rowSeparatorString.charAt(0); 
-        }
-        final int finalRowSeparator = rowSeparator;  
-        final OutputStream outStream = Utilities.createCompressedStream(jc, fs.create(outPath), isCompressed);
-        outWriter = new RecordWriter () {
-            public void write(Writable r) throws IOException {
-              if (r instanceof Text) {
-                Text tr = (Text)r;
-                outStream.write(tr.getBytes(), 0, tr.getLength());
-                outStream.write(finalRowSeparator);
-              } else {
-                // DynamicSerDe always writes out BytesWritable
-                BytesWritable bw = (BytesWritable)r;
-                outStream.write(bw.get(), 0, bw.getSize());
-                outStream.write(finalRowSeparator);
-              }
-            }
-            public void close(boolean abort) throws IOException {
-              outStream.close();
-            }
-          };
-      } else if (outputFormat instanceof SequenceFileOutputFormat) {
-        final SequenceFile.Writer outStream =
-          Utilities.createSequenceWriter(jc, fs, outPath, BytesWritable.class, outputClass,
-                                         isCompressed);
-        outWriter = new RecordWriter () {
-            public void write(Writable r) throws IOException {
-              outStream.append(commonKey, r);
-            }
-            public void close(boolean abort) throws IOException {
-              outStream.close();
-            }
-          };
-      } else {
-        // should never come here - we should be catching this in ddl command
-        throw new HiveException ("Illegal outputformat: " + outputFormat.getClass().getName());
-      }
+      this.outWriter = getRecordWriter(jc, hiveOutputFormat, outputClass, isCompressed, tableInfo.getProperties(), outPath);
 
       // in recent hadoop versions, use deleteOnExit to clean tmp files.
       try {
@@ -189,6 +154,15 @@
     }
   }
 
+  public static RecordWriter getRecordWriter(JobConf jc, HiveOutputFormat<?, ?> hiveOutputFormat,
+      final Class<? extends Writable> valueClass, boolean isCompressed,
+      Properties tableProp, Path outPath) throws IOException, HiveException {
+    if (hiveOutputFormat != null) {
+      return hiveOutputFormat.getHiveRecordWriter(jc, outPath, valueClass, isCompressed, tableProp, null);
+    }
+    return null;
+  }
+
   Writable recordValue; 
   public void process(Object row, ObjectInspector rowInspector) throws HiveException {
     try {

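For reference, a minimal sketch of the refactored write path: the sink no longer switches on the concrete OutputFormat class, it delegates to the table's HiveOutputFormat. The main class, path, and table properties below are illustrative, not part of this commit.

    import java.util.Properties;

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
    import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter;
    import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapred.JobConf;

    public class FileSinkSketch {
      public static void main(String[] args) throws Exception {
        JobConf jc = new JobConf();
        Properties tableProps = new Properties();        // FileSinkOperator uses tableInfo.getProperties()
        Path outPath = new Path("/tmp/filesink_sketch"); // FileSinkOperator uses its temp outPath
        // One code path for every format, replacing the instanceof chain removed above.
        RecordWriter writer = FileSinkOperator.getRecordWriter(jc,
            new HiveIgnoreKeyTextOutputFormat(), Text.class,
            false /* isCompressed */, tableProps, outPath);
        writer.write(new Text("one row"));
        writer.close(false /* abort */);
      }
    }
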
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java?rev=763879&r1=763878&r2=763879&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java Fri Apr 10 06:34:57 2009
@@ -32,6 +32,7 @@
 import org.apache.hadoop.hive.ql.plan.loadFileDesc;
 import org.apache.hadoop.hive.ql.plan.loadTableDesc;
 import org.apache.hadoop.hive.ql.plan.moveWork;
+import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.io.SequenceFile;
@@ -100,8 +101,7 @@
         console.printInfo(mesg, mesg_detail);
 
         if (work.getCheckFileFormat()) {
-          // Get the file format of the table
-          boolean tableIsSequenceFile = tbd.getTable().getInputFileFormatClass().equals(SequenceFileInputFormat.class);
+
           // Get all files from the src directory
           FileStatus [] dirs;
           ArrayList<FileStatus> files;
@@ -118,25 +118,10 @@
           } catch (IOException e) {
             throw new HiveException("addFiles: filesystem error in check phase", e);
           }
+
           // Check if the file format of the file matches that of the table.
-          if (files.size() > 0) {
-            int fileId = 0;
-            boolean fileIsSequenceFile = true;   
-            try {
-              SequenceFile.Reader reader = new SequenceFile.Reader(
-                fs, files.get(fileId).getPath(), conf);
-              reader.close();
-            } catch (IOException e) {
-              fileIsSequenceFile = false;
-            }
-            if (!fileIsSequenceFile && tableIsSequenceFile) {
-              throw new HiveException("Cannot load text files into a table stored as SequenceFile.");
-            }
-            if (fileIsSequenceFile && !tableIsSequenceFile) {
-              throw new HiveException("Cannot load SequenceFiles into a table stored as TextFile.");
-            }
-          }
-        }           
+          HiveFileFormatUtils.checkInputFormat(fs, conf, tbd.getTable().getInputFileFormatClass(), files);
+        }
 
         if(tbd.getPartitionSpec().size() == 0) {
           db.loadTable(new Path(tbd.getSourceDir()), tbd.getTable().getTableName(), tbd.getReplace());

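A sketch of the consolidated check MoveTask now delegates to; the conf construction and source path are illustrative, not part of this commit:

    import java.util.ArrayList;

    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
    import org.apache.hadoop.mapred.SequenceFileInputFormat;

    public class LoadCheckSketch {
      public static void main(String[] args) throws Exception {
        HiveConf conf = new HiveConf(LoadCheckSketch.class);
        FileSystem fs = FileSystem.get(conf);
        ArrayList<FileStatus> files = new ArrayList<FileStatus>();
        files.add(fs.getFileStatus(new Path("/tmp/load_src/part-00000")));
        // Throws HiveException (same messages as the inlined code it replaces)
        // when the first file's format disagrees with the table's input format;
        // returns false when there are no files to check.
        HiveFileFormatUtils.checkInputFormat(fs, conf,
            SequenceFileInputFormat.class, files);
      }
    }
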
Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java?rev=763879&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java Fri Apr 10 06:34:57 2009
@@ -0,0 +1,149 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.io;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.mapred.InputFormat;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.OutputFormat;
+import org.apache.hadoop.mapred.SequenceFileInputFormat;
+import org.apache.hadoop.mapred.SequenceFileOutputFormat;
+
+/**
+ * A utility class for various Hive file format tasks.
+ * {@link #registerOutputFormatSubstitute(Class, Class)} and
+ * {@link #getOutputFormatSubstitute(Class)} are added for backward
+ * compatibility: they map older OutputFormat classes to their newly added
+ * HiveOutputFormat substitutes.
+ * 
+ */
+public class HiveFileFormatUtils {
+
+  static {
+    outputFormatSubstituteMap = new HashMap<Class<? extends OutputFormat>, Class<? extends HiveOutputFormat>>();
+    HiveFileFormatUtils.registerOutputFormatSubstitute(
+        IgnoreKeyTextOutputFormat.class, HiveIgnoreKeyTextOutputFormat.class);
+    HiveFileFormatUtils.registerOutputFormatSubstitute(
+        SequenceFileOutputFormat.class, HiveSequenceFileOutputFormat.class);
+  }
+
+  @SuppressWarnings("unchecked")
+  private static Map<Class<? extends OutputFormat>, Class<? extends HiveOutputFormat>> outputFormatSubstituteMap;
+
+  /**
+   * register a substitute
+   * 
+   * @param origin
+   *          the class that needs to be substituted
+   * @param substitute
+   *          the HiveOutputFormat to use in its place
+   */
+  @SuppressWarnings("unchecked")
+  public synchronized static void registerOutputFormatSubstitute(
+      Class<? extends OutputFormat> origin,
+      Class<? extends HiveOutputFormat> substitute) {
+    outputFormatSubstituteMap.put(origin, substitute);
+  }
+
+  /**
+   * get an OutputFormat's substitute HiveOutputFormat
+   * 
+   * @param origin
+   * @return the substitute HiveOutputFormat, or null if none is registered
+   */
+  @SuppressWarnings("unchecked")
+  public synchronized static Class<? extends HiveOutputFormat> getOutputFormatSubstitute(
+      Class<?> origin) {
+    if (HiveOutputFormat.class.isAssignableFrom(origin))
+      return (Class<? extends HiveOutputFormat>) origin;
+    Class<? extends HiveOutputFormat> result = outputFormatSubstituteMap
+        .get(origin);
+    return result;
+  }
+
+  /**
+   * get the final output path of a given FileOutputFormat.
+   * 
+   * @param parent
+   *          parent dir of the expected final output path
+   * @param jc
+   *          job configuration
+   * @param hiveOutputFormat
+   * @param isCompressed
+   * @param defaultFinalPath
+   * @return
+   * @throws IOException
+   */
+  public static Path getOutputFormatFinalPath(Path parent, JobConf jc,
+      HiveOutputFormat<?, ?> hiveOutputFormat, boolean isCompressed,
+      Path defaultFinalPath) throws IOException {
+    if (hiveOutputFormat instanceof HiveIgnoreKeyTextOutputFormat) {
+      return new Path(parent, Utilities.getTaskId(jc)
+          + Utilities.getFileExtension(jc, isCompressed));
+    }
+    return defaultFinalPath;
+  }
+
+  /**
+   * checks whether the given files are in the same format as the given input
+   * format
+   * 
+   * @param fs
+   * @param conf
+   * @param inputFormatCls
+   * @param files
+   * @throws HiveException
+   */
+  public static boolean checkInputFormat(FileSystem fs, HiveConf conf,
+      Class<? extends InputFormat> inputFormatCls, ArrayList<FileStatus> files)
+      throws HiveException {
+    if (files.size() > 0) {
+      boolean tableIsSequenceFile = inputFormatCls
+          .equals(SequenceFileInputFormat.class);
+      int fileId = 0;
+      boolean fileIsSequenceFile = true;
+      try {
+        SequenceFile.Reader reader = new SequenceFile.Reader(fs, files.get(
+            fileId).getPath(), conf);
+        reader.close();
+      } catch (IOException e) {
+        fileIsSequenceFile = false;
+      }
+      if (!fileIsSequenceFile && tableIsSequenceFile) {
+        throw new HiveException(
+            "Cannot load text files into a table stored as SequenceFile.");
+      }
+      if (fileIsSequenceFile && !tableIsSequenceFile) {
+        throw new HiveException(
+            "Cannot load SequenceFiles into a table stored as TextFile.");
+      }
+      return true;
+    }
+    return false;
+  }
+}

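The registry is the backward-compatibility hook: old OutputFormat classes resolve to their HiveOutputFormat replacements, and other formats can register their own. A short sketch (MyLegacyOutputFormat and MyHiveOutputFormat are hypothetical classes, not part of this commit):

    // A class that already implements HiveOutputFormat resolves to itself:
    assert HiveFileFormatUtils.getOutputFormatSubstitute(
        HiveSequenceFileOutputFormat.class) == HiveSequenceFileOutputFormat.class;

    // One of the two built-in substitutes registered in the static block above:
    assert HiveFileFormatUtils.getOutputFormatSubstitute(
        IgnoreKeyTextOutputFormat.class) == HiveIgnoreKeyTextOutputFormat.class;

    // A hypothetical legacy format can opt in the same way:
    HiveFileFormatUtils.registerOutputFormatSubstitute(
        MyLegacyOutputFormat.class, MyHiveOutputFormat.class);
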
Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveIgnoreKeyTextOutputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveIgnoreKeyTextOutputFormat.java?rev=763879&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveIgnoreKeyTextOutputFormat.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveIgnoreKeyTextOutputFormat.java Fri Apr 10 06:34:57 2009
@@ -0,0 +1,128 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.io;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.Properties;
+
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter;
+import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.TextOutputFormat;
+import org.apache.hadoop.util.Progressable;
+
+/**
+ * HiveIgnoreKeyTextOutputFormat replaces the key with null before feeding the
+ * <key, value> pair to TextOutputFormat.RecordWriter.
+ * 
+ */
+public class HiveIgnoreKeyTextOutputFormat<K extends WritableComparable, V extends Writable>
+    extends TextOutputFormat<K, V> implements HiveOutputFormat<K, V> {
+
+  /**
+   * create the final output file and write it row by row; after each row,
+   * the configured row separator is appended
+   * 
+   * @param jc
+   *          the job configuration file
+   * @param outPath
+   *          the final output file to be created
+   * @param valueClass
+   *          the value class used for create
+   * @param isCompressed
+   *          whether the content is compressed or not
+   * @param tableProperties
+   *          the table properties of this file's corresponding table
+   * @param progress
+   *          progress used for status report
+   * @return the RecordWriter for the output file
+   * @throws IOException
+   */
+  @Override
+  public RecordWriter getHiveRecordWriter(JobConf jc, Path outPath,
+      Class<? extends Writable> valueClass, boolean isCompressed,
+      Properties tableProperties, Progressable progress) throws IOException {
+    int rowSeparator = 0;
+    String rowSeparatorString = tableProperties.getProperty(
+        Constants.LINE_DELIM, "\n");
+    try {
+      rowSeparator = Byte.parseByte(rowSeparatorString);
+    } catch (NumberFormatException e) {
+      rowSeparator = rowSeparatorString.charAt(0);
+    }
+
+    final int finalRowSeparator = rowSeparator;
+    final OutputStream outStream = Utilities.createCompressedStream(jc,
+        FileSystem.get(jc).create(outPath), isCompressed);
+    return new RecordWriter() {
+      public void write(Writable r) throws IOException {
+        if (r instanceof Text) {
+          Text tr = (Text) r;
+          outStream.write(tr.getBytes(), 0, tr.getLength());
+          outStream.write(finalRowSeparator);
+        } else {
+          // DynamicSerDe always writes out BytesWritable
+          BytesWritable bw = (BytesWritable) r;
+          outStream.write(bw.get(), 0, bw.getSize());
+          outStream.write(finalRowSeparator);
+        }
+      }
+
+      public void close(boolean abort) throws IOException {
+        outStream.close();
+      }
+    };
+  }
+
+  protected static class IgnoreKeyWriter<K extends WritableComparable, V extends Writable>
+      implements org.apache.hadoop.mapred.RecordWriter<K, V> {
+
+    private org.apache.hadoop.mapred.RecordWriter<K, V> mWriter;
+
+    public IgnoreKeyWriter(org.apache.hadoop.mapred.RecordWriter<K, V> writer) {
+      this.mWriter = writer;
+    }
+
+    public synchronized void write(K key, V value) throws IOException {
+      this.mWriter.write(null, value);
+    }
+
+    public void close(Reporter reporter) throws IOException {
+      this.mWriter.close(reporter);
+    }
+  }
+
+  public org.apache.hadoop.mapred.RecordWriter<K, V> getRecordWriter(
+      FileSystem ignored, JobConf job, String name, Progressable progress)
+      throws IOException {
+
+    return new IgnoreKeyWriter<K, V>(super.getRecordWriter(ignored, job, name,
+        progress));
+  }
+
+}

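The row separator above comes entirely from the table's line.delim property: a numeric value is parsed as a byte, anything else falls back to its first character. For example (property values illustrative):

    Properties props = new Properties();
    props.setProperty(Constants.LINE_DELIM, "9");  // Byte.parseByte("9") -> 0x09, a tab
    // equivalently:
    props.setProperty(Constants.LINE_DELIM, "\t"); // NumberFormatException -> charAt(0) -> 0x09
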
Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveOutputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveOutputFormat.java?rev=763879&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveOutputFormat.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveOutputFormat.java Fri Apr 10 06:34:57 2009
@@ -0,0 +1,68 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.io;
+
+import java.io.IOException;
+import java.util.Properties;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.util.Progressable;
+
+/**
+ * <code>HiveOutputFormat</code> describes the output-specification for Hive's
+ * operators. It adds a
+ * {@link #getHiveRecordWriter(JobConf, Path, Class, boolean, Properties, Progressable)}
+ * method whose parameters are used to create the final output file and to pick
+ * up format-specific settings.
+ * 
+ * @see OutputFormat
+ * @see RecordWriter
+ * @see JobConf
+ */
+public interface HiveOutputFormat<K extends WritableComparable, V extends Writable> {
+
+  /**
+   * create the final output file and get some specific settings.
+   * 
+   * @param jc
+   *          the job configuration file
+   * @param finalOutPath
+   *          the final output file to be created
+   * @param valueClass
+   *          the value class used for create
+   * @param isCompressed
+   *          whether the content is compressed or not
+   * @param tableProperties
+   *          the table properties of this file's corresponding table
+   * @param progress
+   *          progress used for status report
+   * @return the RecordWriter for the output file
+   * @throws IOException
+   */
+  public RecordWriter getHiveRecordWriter(JobConf jc, Path finalOutPath,
+      final Class<? extends Writable> valueClass, boolean isCompressed,
+      Properties tableProperties, Progressable progress) throws IOException;
+
+}

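A minimal sketch of an implementation, to show the contract; the class is hypothetical, not part of this commit, and its writer simply discards rows:

    import java.io.IOException;
    import java.util.Properties;

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter;
    import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
    import org.apache.hadoop.io.Writable;
    import org.apache.hadoop.io.WritableComparable;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.util.Progressable;

    public class DiscardingOutputFormat
        implements HiveOutputFormat<WritableComparable, Writable> {
      public RecordWriter getHiveRecordWriter(JobConf jc, Path finalOutPath,
          Class<? extends Writable> valueClass, boolean isCompressed,
          Properties tableProperties, Progressable progress) throws IOException {
        return new RecordWriter() {
          public void write(Writable r) { /* drop the row */ }
          public void close(boolean abort) { /* nothing to flush */ }
        };
      }
    }
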
Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveSequenceFileOutputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveSequenceFileOutputFormat.java?rev=763879&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveSequenceFileOutputFormat.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveSequenceFileOutputFormat.java Fri Apr 10 06:34:57 2009
@@ -0,0 +1,81 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.io;
+
+import java.io.IOException;
+import java.util.Properties;
+
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.SequenceFileOutputFormat;
+import org.apache.hadoop.util.Progressable;
+
+/** A {@link HiveOutputFormat} that writes {@link SequenceFile}s. */
+public class HiveSequenceFileOutputFormat extends
+    SequenceFileOutputFormat implements
+    HiveOutputFormat<WritableComparable, Writable> {
+
+  BytesWritable EMPTY_KEY = new BytesWritable();
+
+  /**
+   * create the final output file, writing an empty key with every row
+   * 
+   * @param jc
+   *          the job configuration file
+   * @param finalOutPath
+   *          the final output file to be created
+   * @param valueClass
+   *          the value class used for create
+   * @param isCompressed
+   *          whether the content is compressed or not
+   * @param tableProperties
+   *          the table properties of this file's corresponding table
+   * @param progress
+   *          progress used for status report
+   * @return the RecordWriter for the output file
+   * @throws IOException
+   */
+  @Override
+  public RecordWriter getHiveRecordWriter(JobConf jc, Path finalOutPath,
+      Class<? extends Writable> valueClass, boolean isCompressed,
+      Properties tableProperties, Progressable progress) throws IOException {
+
+    final SequenceFile.Writer outStream = Utilities.createSequenceWriter(jc,
+        FileSystem.get(jc), finalOutPath, BytesWritable.class, valueClass,
+        isCompressed);
+
+    return new RecordWriter() {
+      public void write(Writable r) throws IOException {
+        outStream.append(EMPTY_KEY, r);
+      }
+
+      public void close(boolean abort) throws IOException {
+        outStream.close();
+      }
+    };
+  }
+
+}

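Usage mirrors the text case; a sketch with an illustrative path, compressing per the job's settings:

    JobConf jc = new JobConf();
    RecordWriter writer = new HiveSequenceFileOutputFormat().getHiveRecordWriter(jc,
        new Path("/tmp/seq_sketch"), BytesWritable.class,
        true /* isCompressed */, new Properties(), null /* progress */);
    writer.write(new BytesWritable("payload".getBytes()));
    writer.close(false);
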
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/IgnoreKeyTextOutputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/IgnoreKeyTextOutputFormat.java?rev=763879&r1=763878&r2=763879&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/IgnoreKeyTextOutputFormat.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/IgnoreKeyTextOutputFormat.java Fri Apr 10 06:34:57 2009
@@ -32,7 +32,8 @@
 /**
  * This class replaces key with null before feeding the <key, value> 
  * to TextOutputFormat.RecordWriter.
- *   
+ * 
+ * @deprecated use {@link HiveIgnoreKeyTextOutputFormat} instead
  */
 public class IgnoreKeyTextOutputFormat<K extends WritableComparable, 
                                        V extends Writable> 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java?rev=763879&r1=763878&r2=763879&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java Fri Apr 10 06:34:57 2009
@@ -55,6 +55,8 @@
 import org.apache.hadoop.mapred.OutputFormat;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
+import org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat;
 
 import com.facebook.thrift.TException;
 import com.facebook.thrift.protocol.TBinaryProtocol;
@@ -168,7 +170,7 @@
    * @throws HiveException thrown if the args are invalid or if the metadata or the data directory couldn't be created
    */
   public void createTable(String tableName, List<String> columns, List<String> partCols,
-      Class<? extends InputFormat> fileInputFormat, Class<? extends OutputFormat> fileOutputFormat) throws HiveException {
+      Class<? extends InputFormat> fileInputFormat, Class<?> fileOutputFormat) throws HiveException {
     this.createTable(tableName, columns, partCols, fileInputFormat, fileOutputFormat, -1, null);
   }
 
@@ -183,7 +185,7 @@
    * @throws HiveException thrown if the args are invalid or if the metadata or the data directory couldn't be created
    */
   public void createTable(String tableName, List<String> columns, List<String> partCols,
-      Class<? extends InputFormat> fileInputFormat, Class<? extends OutputFormat> fileOutputFormat, int bucketCount, List<String> bucketCols) throws HiveException {
+      Class<? extends InputFormat> fileInputFormat, Class<?> fileOutputFormat, int bucketCount, List<String> bucketCols) throws HiveException {
     if(columns == null) {
       throw new HiveException("columns not specified for table " + tableName);
     }
@@ -422,9 +424,9 @@
       table.setInputFormatClass((Class<? extends InputFormat<WritableComparable, Writable>>)
           Class.forName(table.getSchema().getProperty(org.apache.hadoop.hive.metastore.api.Constants.FILE_INPUT_FORMAT,
               org.apache.hadoop.mapred.SequenceFileInputFormat.class.getName())));
-      table.setOutputFormatClass((Class<? extends OutputFormat<WritableComparable, Writable>>)
+      table.setOutputFormatClass((Class<? extends HiveOutputFormat>)
           Class.forName(table.getSchema().getProperty(org.apache.hadoop.hive.metastore.api.Constants.FILE_OUTPUT_FORMAT,
-              org.apache.hadoop.mapred.SequenceFileOutputFormat.class.getName()))); 
+              HiveSequenceFileOutputFormat.class.getName()))); 
       table.setDeserializer(MetaStoreUtils.getDeserializer(getConf(), p));
       table.setDataLocation(new URI(tTable.getSd().getLocation()));
     } catch(Exception e) {

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java?rev=763879&r1=763878&r2=763879&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java Fri Apr 10 06:34:57 2009
@@ -40,6 +40,8 @@
 import org.apache.hadoop.hive.metastore.api.Order;
 import org.apache.hadoop.hive.metastore.api.SerDeInfo;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
+import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
 import org.apache.hadoop.hive.serde.Constants;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.Deserializer;
@@ -65,7 +67,7 @@
   private Deserializer deserializer;
   private URI uri;
   private Class<? extends InputFormat> inputFormatClass;
-  private Class<? extends OutputFormat> outputFormatClass;
+  private Class<? extends HiveOutputFormat> outputFormatClass;
   private org.apache.hadoop.hive.metastore.api.Table tTable;
 
   /**
@@ -90,7 +92,7 @@
    */
   public Table(String name, Properties schema, Deserializer deserializer, 
       Class<? extends InputFormat<?, ?>> inputFormatClass,
-      Class<? extends OutputFormat<?, ?>> outputFormatClass,
+      Class<?> outputFormatClass,
       URI dataLocation, Hive hive) throws HiveException {
     initEmpty();
     this.schema = schema;
@@ -99,7 +101,7 @@
     getTTable().setTableName(name);
     getSerdeInfo().setSerializationLib(deserializer.getClass().getName());
     setInputFormatClass(inputFormatClass);
-    setOutputFormatClass(outputFormatClass);
+    setOutputFormatClass(HiveFileFormatUtils.getOutputFormatSubstitute(outputFormatClass));
     setDataLocation(dataLocation);
   }
   
@@ -203,11 +205,11 @@
   }
 
   /**
-   * @param outputFormatClass 
+   * @param class1 the output format class, or one for which a HiveOutputFormat substitute is registered
    */
-  public void setOutputFormatClass(Class<? extends OutputFormat> outputFormatClass) {
-    this.outputFormatClass = outputFormatClass;
-    tTable.getSd().setOutputFormat(outputFormatClass.getName());
+  public void setOutputFormatClass(Class<?> class1) {
+    this.outputFormatClass = HiveFileFormatUtils.getOutputFormatSubstitute(class1);
+    tTable.getSd().setOutputFormat(class1.getName());
   }
 
   final public Properties getSchema()  {
@@ -241,7 +243,7 @@
     return inputFormatClass;
   }
 
-  final public Class<? extends OutputFormat> getOutputFormatClass() {
+  final public Class<? extends HiveOutputFormat> getOutputFormatClass() {
     return outputFormatClass;
   }
 
@@ -478,7 +480,8 @@
 
   public void setOutputFormatClass(String name) throws HiveException {
     try {
-      setOutputFormatClass((Class<? extends OutputFormat<WritableComparable, Writable>>)Class.forName(name));
+      Class<?> origin = Class.forName(name);
+      setOutputFormatClass(HiveFileFormatUtils.getOutputFormatSubstitute(origin));
     } catch (ClassNotFoundException e) {
       throw new HiveException("Class not found: " + name, e);
     }

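The net effect on Table, sketched below: callers may still hand over a plain OutputFormat class, the runtime class is substituted, and the storage descriptor keeps the name that was passed in. (A sketch, assuming the Table(String) constructor used by the tests further down.)

    Table t = new Table("substitute_sketch");
    t.setInputFormatClass(TextInputFormat.class);
    t.setOutputFormatClass(IgnoreKeyTextOutputFormat.class);
    // The runtime class is the substitute ...
    assert t.getOutputFormatClass() == HiveIgnoreKeyTextOutputFormat.class;
    // ... while the metastore descriptor records the original class name:
    assert t.getTTable().getSd().getOutputFormat()
        .equals(IgnoreKeyTextOutputFormat.class.getName());
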
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=763879&r1=763878&r2=763879&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Fri Apr 10 06:34:57 2009
@@ -40,6 +40,8 @@
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.plan.AddPartitionDesc;
 import org.apache.hadoop.hive.ql.plan.MsckDesc;
+import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
+import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
 import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
 import org.apache.hadoop.hive.ql.plan.DDLWork;
 import org.apache.hadoop.hive.ql.plan.alterTableDesc;
@@ -268,6 +270,15 @@
       return;
     }
     
+    try {
+      Class<?> origin = Class.forName(crtTblDesc.getOutputFormat());
+      Class<? extends HiveOutputFormat> replaced = HiveFileFormatUtils.getOutputFormatSubstitute(origin);
+      if(replaced == null)
+        throw new SemanticException(ErrorMsg.INVALID_OUTPUT_FORMAT_TYPE.getMsg());
+    } catch (ClassNotFoundException e) {
+      throw new SemanticException(ErrorMsg.INVALID_OUTPUT_FORMAT_TYPE.getMsg());
+    }
+    
     Iterator<FieldSchema> iterCols = crtTblDesc.getCols().iterator();
     List<String> colNames = new ArrayList<String>();
     while (iterCols.hasNext()) {

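At CREATE TABLE time an output format with no registered substitute is now rejected up front; a sketch of what the added block checks:

    // TextOutputFormat implements OutputFormat but not HiveOutputFormat, and no
    // substitute is registered for it, so the analyzer raises
    // INVALID_OUTPUT_FORMAT_TYPE before any table is created.
    Class<?> origin = Class.forName("org.apache.hadoop.mapred.TextOutputFormat");
    assert HiveFileFormatUtils.getOutputFormatSubstitute(origin) == null;
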
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java?rev=763879&r1=763878&r2=763879&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java Fri Apr 10 06:34:57 2009
@@ -68,7 +68,7 @@
   CLUSTERBY_SORTBY_CONFLICT("Cannot have both Cluster By and Sort By Clauses"),
   UNION_NOTIN_SUBQ("Top level Union is not supported currently; use a subquery for the union"),
   INVALID_INPUT_FORMAT_TYPE("Input Format must implement InputFormat"),
-  INVALID_OUTPUT_FORMAT_TYPE("Output Format must implement OutputFormat"),
+  INVALID_OUTPUT_FORMAT_TYPE("Output Format must implement HiveOutputFormat, otherwise it should be either IgnoreKeyTextOutputFormat or SequenceFileOutputFormat"),
   NON_BUCKETED_TABLE("Sampling Expression Needed for Non-Bucketed Table");
 
   private String mesg;

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=763879&r1=763878&r2=763879&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Fri Apr 10 06:34:57 2009
@@ -53,6 +53,7 @@
 import org.apache.hadoop.hive.ql.exec.UDAFEvaluator;
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
 import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
 import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
 import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
@@ -693,7 +694,7 @@
         case HiveParser.TOK_TAB: {
           tableSpec ts = new tableSpec(this.db, ast, true);
 
-          if (!OutputFormat.class.isAssignableFrom(ts.tableHandle.getOutputFormatClass()))
+          if (!HiveOutputFormat.class.isAssignableFrom(ts.tableHandle.getOutputFormatClass()))
             throw new SemanticException(ErrorMsg.INVALID_OUTPUT_FORMAT_TYPE.getMsg(ast));
 
           if(ts.partSpec == null) {

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/tableDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/tableDesc.java?rev=763879&r1=763878&r2=763879&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/tableDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/tableDesc.java Fri Apr 10 06:34:57 2009
@@ -22,6 +22,8 @@
 
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.OutputFormat;
+import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
+import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.SerDe;
 
@@ -29,21 +31,22 @@
   private static final long serialVersionUID = 1L;
   private Class<? extends Deserializer> deserializerClass;
   private Class<? extends InputFormat> inputFileFormatClass;
-  private Class<? extends OutputFormat> outputFileFormatClass;
+  private Class<? extends HiveOutputFormat> outputFileFormatClass;
   private java.util.Properties properties;
   private String serdeClassName;
   public tableDesc() { }
   public tableDesc(
       final Class<? extends Deserializer> serdeClass,
       final Class<? extends InputFormat> inputFileFormatClass,
-      final Class<? extends OutputFormat> class1,
+      final Class<?> class1,
       final java.util.Properties properties) {
     this.deserializerClass = serdeClass;
     this.inputFileFormatClass = inputFileFormatClass;
-    this.outputFileFormatClass = class1;
+    this.outputFileFormatClass = HiveFileFormatUtils.getOutputFormatSubstitute(class1);
     this.properties = properties;
     this.serdeClassName = properties.getProperty(org.apache.hadoop.hive.serde.Constants.SERIALIZATION_LIB);
   }
+  
   public Class<? extends Deserializer> getDeserializerClass() {
     return this.deserializerClass;
   }
@@ -64,11 +67,11 @@
   public void setInputFileFormatClass(final Class<? extends InputFormat> inputFileFormatClass) {
     this.inputFileFormatClass=inputFileFormatClass;
   }
-  public Class<? extends OutputFormat> getOutputFileFormatClass() {
+  public Class<? extends HiveOutputFormat> getOutputFileFormatClass() {
     return this.outputFileFormatClass;
   }
-  public void setOutputFileFormatClass(final Class<? extends OutputFormat> outputFileFormatClass) {
-    this.outputFileFormatClass=outputFileFormatClass;
+  public void setOutputFileFormatClass(final Class<?> outputFileFormatClass) {
+    this.outputFileFormatClass = HiveFileFormatUtils.getOutputFormatSubstitute(outputFileFormatClass);
   }
   
   @explain(displayName="properties", normalExplain=false)

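For illustration only, not part of this patch: tableDesc now funnels every caller-supplied class through HiveFileFormatUtils.getOutputFormatSubstitute, which is why the constructor and setter can accept a plain Class<?> while the field stays typed to HiveOutputFormat. Judging from the plan changes in the .q.out files below, the substitution maps the two legacy formats onto their new Hive wrappers and leaves HiveOutputFormat implementations alone. A hedged sketch of that mapping; the real utility's signature and edge cases may differ:

    import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
    import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
    import org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat;
    import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
    import org.apache.hadoop.mapred.SequenceFileOutputFormat;

    public class OutputFormatSubstituteSketch {
      @SuppressWarnings("unchecked")
      public static Class<? extends HiveOutputFormat> getOutputFormatSubstitute(Class<?> origin) {
        if (HiveOutputFormat.class.isAssignableFrom(origin)) {
          // Already a HiveOutputFormat: nothing to substitute.
          return (Class<? extends HiveOutputFormat>) origin;
        }
        if (IgnoreKeyTextOutputFormat.class.isAssignableFrom(origin)) {
          return HiveIgnoreKeyTextOutputFormat.class;   // legacy text output
        }
        if (SequenceFileOutputFormat.class.isAssignableFrom(origin)) {
          return HiveSequenceFileOutputFormat.class;    // legacy sequence files
        }
        return null; // unknown formats are later rejected by the analyzer
      }
    }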
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java?rev=763879&r1=763878&r2=763879&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java Fri Apr 10 06:34:57 2009
@@ -13,6 +13,7 @@
 import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
 import org.apache.hadoop.hive.serde.Constants;
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hadoop.mapred.TextOutputFormat;
@@ -93,7 +94,7 @@
     Table table = new Table(tableName);
     table.getTTable().setDbName(dbName);
     table.setInputFormatClass(TextInputFormat.class);
-    table.setOutputFormatClass(TextOutputFormat.class);
+    table.setOutputFormatClass(IgnoreKeyTextOutputFormat.class);
 
     hive.createTable(table);
     // now we've got a table, check that it works
@@ -162,7 +163,7 @@
     Table table = new Table(tableName);
     table.getTTable().setDbName(dbName);
     table.setInputFormatClass(TextInputFormat.class);
-    table.setOutputFormatClass(TextOutputFormat.class);
+    table.setOutputFormatClass(IgnoreKeyTextOutputFormat.class);
     table.setPartCols(partCols);
 
     hive.createTable(table);

Added: hadoop/hive/trunk/ql/src/test/queries/clientnegative/create_insert_outputformat.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientnegative/create_insert_outputformat.q?rev=763879&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientnegative/create_insert_outputformat.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientnegative/create_insert_outputformat.q Fri Apr 10 06:34:57 2009
@@ -0,0 +1,11 @@
+DROP TABLE table_test_output_format;
+
+CREATE TABLE table_test_output_format(key INT, value STRING) STORED AS
+  INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat'
+  OUTPUTFORMAT 'org.apache.hadoop.mapred.MapFileOutputFormat';
+
+FROM src
+INSERT OVERWRITE TABLE table_test_output_format SELECT src.key, src.value LIMIT 10;
+
+describe table_test_output_format;
+DROP TABLE table_test_output_format;

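This case is expected to fail: org.apache.hadoop.mapred.MapFileOutputFormat neither implements HiveOutputFormat nor is one of the two legacy formats that receive a Hive wrapper, so it has no substitute and semantic analysis rejects the INSERT. Illustrative only, in terms of the hypothetical sketch above:

    import org.apache.hadoop.mapred.MapFileOutputFormat;

    public class NegativeCaseSketch {
      public static void main(String[] args) {
        // Expected: null, meaning no Hive wrapper exists, so the analyzer
        // raises INVALID_OUTPUT_FORMAT_TYPE for this table.
        System.out.println(
            OutputFormatSubstituteSketch.getOutputFormatSubstitute(MapFileOutputFormat.class));
      }
    }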
Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_insert_outputformat.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_insert_outputformat.q?rev=763879&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_insert_outputformat.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_insert_outputformat.q Fri Apr 10 06:34:57 2009
@@ -0,0 +1,30 @@
+DROP TABLE table_test_output_format;
+
+CREATE TABLE table_test_output_format(key INT, value STRING) STORED AS
+  INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat'
+  OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat';
+
+FROM src
+INSERT OVERWRITE TABLE table_test_output_format SELECT src.key, src.value LIMIT 10;
+describe table_test_output_format;
+DROP TABLE table_test_output_format;
+
+DROP TABLE table_test_output_format_sequencefile;
+CREATE TABLE table_test_output_format_sequencefile(key INT, value STRING) STORED AS
+  INPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileInputFormat'
+  OUTPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileOutputFormat';
+
+FROM src
+INSERT OVERWRITE TABLE table_test_output_format_sequencefile SELECT src.key, src.value LIMIT 10;
+describe table_test_output_format_sequencefile;
+DROP TABLE table_test_output_format_sequencefile;
+
+DROP TABLE table_test_output_format_hivesequencefile;
+CREATE TABLE table_test_output_format_hivesequencefile(key INT, value STRING) STORED AS
+  INPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileInputFormat'
+  OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat';
+
+FROM src
+INSERT OVERWRITE TABLE table_test_output_format_hivesequencefile SELECT src.key, src.value LIMIT 10;
+describe table_test_output_format_hivesequencefile;
+DROP TABLE table_test_output_format_hivesequencefile;

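Note that the positive test covers both spellings of the sequence-file case: the legacy org.apache.hadoop.mapred.SequenceFileOutputFormat and the new org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat. Both should behave identically because the legacy class resolves to the Hive wrapper at plan time, which is also what the groupby .q.out diffs below show. Illustrative only, again in terms of the hypothetical sketch above:

    import org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat;
    import org.apache.hadoop.mapred.SequenceFileOutputFormat;

    public class PositiveCaseSketch {
      public static void main(String[] args) {
        // Expected: true, i.e. both spellings resolve to the same writer class.
        System.out.println(
            OutputFormatSubstituteSketch.getOutputFormatSubstitute(SequenceFileOutputFormat.class)
                == HiveSequenceFileOutputFormat.class);
      }
    }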
Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/create_insert_outputformat.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/create_insert_outputformat.q.out?rev=763879&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/create_insert_outputformat.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/create_insert_outputformat.q.out Fri Apr 10 06:34:57 2009
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: Output Format must implement HiveOutputFormat; otherwise it should be either IgnoreKeyTextOutputFormat or SequenceFileOutputFormat

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/fileformat_bad_class.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/fileformat_bad_class.q.out?rev=763879&r1=763878&r2=763879&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/fileformat_bad_class.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/fileformat_bad_class.q.out Fri Apr 10 06:34:57 2009
@@ -1,2 +1 @@
-FAILED: Error in metadata: Class not found: ClassDoesNotExist
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Error in semantic analysis: Output Format must implement HiveOutputFormat; otherwise it should be either IgnoreKeyTextOutputFormat or SequenceFileOutputFormat

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/fileformat_void_output.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/fileformat_void_output.q.out?rev=763879&r1=763878&r2=763879&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/fileformat_void_output.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/fileformat_void_output.q.out Fri Apr 10 06:34:57 2009
@@ -1 +1 @@
-FAILED: Error in semantic analysis: line 4:23 Output Format must implement OutputFormat dest1
+FAILED: Error in semantic analysis: Output Format must implement HiveOutputFormat; otherwise it should be either IgnoreKeyTextOutputFormat or SequenceFileOutputFormat

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/script_error.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/script_error.q.out?rev=763879&r1=763878&r2=763879&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/script_error.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/script_error.q.out Fri Apr 10 06:34:57 2009
@@ -20,13 +20,13 @@
                 command: ../data/scripts/error_script
                 output info:
                     input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                 File Output Operator
                   compressed: false
                   GlobalTableId: 0
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 
   Stage: Stage-0
     Fetch Operator

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/binarysortable_1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/binarysortable_1.q.out?rev=763879&r1=763878&r2=763879&view=diff
==============================================================================
Files hadoop/hive/trunk/ql/src/test/results/clientpositive/binarysortable_1.q.out (original) and hadoop/hive/trunk/ql/src/test/results/clientpositive/binarysortable_1.q.out Fri Apr 10 06:34:57 2009 differ

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/case_sensitivity.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/case_sensitivity.q.out?rev=763879&r1=763878&r2=763879&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/case_sensitivity.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/case_sensitivity.q.out Fri Apr 10 06:34:57 2009
@@ -31,7 +31,7 @@
                     GlobalTableId: 1
                     table:
                         input format: org.apache.hadoop.mapred.TextInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: dest1
 
@@ -41,7 +41,7 @@
             replace: true
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: dest1
 

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/cast1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/cast1.q.out?rev=763879&r1=763878&r2=763879&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/cast1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/cast1.q.out Fri Apr 10 06:34:57 2009
@@ -39,7 +39,7 @@
                     GlobalTableId: 1
                     table:
                         input format: org.apache.hadoop.mapred.TextInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         name: dest1
 
@@ -49,7 +49,7 @@
             replace: true
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: dest1
 

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/cluster.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/cluster.q.out?rev=763879&r1=763878&r2=763879&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/cluster.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/cluster.q.out Fri Apr 10 06:34:57 2009
@@ -41,7 +41,7 @@
             GlobalTableId: 0
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 
   Stage: Stage-0
     Fetch Operator
@@ -92,7 +92,7 @@
             GlobalTableId: 0
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 
   Stage: Stage-0
     Fetch Operator
@@ -143,7 +143,7 @@
             GlobalTableId: 0
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 
   Stage: Stage-0
     Fetch Operator
@@ -194,7 +194,7 @@
             GlobalTableId: 0
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 
   Stage: Stage-0
     Fetch Operator
@@ -245,7 +245,7 @@
             GlobalTableId: 0
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 
   Stage: Stage-0
     Fetch Operator
@@ -296,7 +296,7 @@
             GlobalTableId: 0
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 
   Stage: Stage-0
     Fetch Operator
@@ -347,7 +347,7 @@
             GlobalTableId: 0
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 
   Stage: Stage-0
     Fetch Operator
@@ -404,7 +404,7 @@
                 GlobalTableId: 0
                 table:
                     input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 
   Stage: Stage-0
     Fetch Operator
@@ -480,13 +480,13 @@
                 GlobalTableId: 0
                 table:
                     input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.mapred.SequenceFileOutputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                     name: binary_table
 
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        /data/users/njain/hive2/hive/build/ql/tmp/596968299/28368921.10002 
+        /Users/char/Documents/workspace/Hive/build/ql/tmp/184203761/832590546.10002 
           Reduce Output Operator
             key expressions:
                   expr: 1
@@ -510,7 +510,7 @@
             GlobalTableId: 0
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 
   Stage: Stage-0
     Fetch Operator
@@ -586,13 +586,13 @@
                 GlobalTableId: 0
                 table:
                     input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.mapred.SequenceFileOutputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                     name: binary_table
 
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        /data/users/njain/hive2/hive/build/ql/tmp/12416576/480017656.10002 
+        /Users/char/Documents/workspace/Hive/build/ql/tmp/795329052/20301787.10002 
           Reduce Output Operator
             key expressions:
                   expr: 1
@@ -618,7 +618,7 @@
             GlobalTableId: 0
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 
   Stage: Stage-0
     Fetch Operator
@@ -694,13 +694,13 @@
                 GlobalTableId: 0
                 table:
                     input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.mapred.SequenceFileOutputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                     name: binary_table
 
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        /data/users/njain/hive2/hive/build/ql/tmp/15199144/1962713711.10002 
+        /Users/char/Documents/workspace/Hive/build/ql/tmp/222467858/661588658.10002 
           Reduce Output Operator
             key expressions:
                   expr: 0
@@ -726,7 +726,7 @@
             GlobalTableId: 0
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 
   Stage: Stage-0
     Fetch Operator
@@ -802,13 +802,13 @@
                 GlobalTableId: 0
                 table:
                     input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.mapred.SequenceFileOutputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                     name: binary_table
 
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        /data/users/njain/hive2/hive/build/ql/tmp/103416384/41425981.10002 
+        /Users/char/Documents/workspace/Hive/build/ql/tmp/985824615/11059261.10002 
           Reduce Output Operator
             key expressions:
                   expr: 0
@@ -832,7 +832,7 @@
             GlobalTableId: 0
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 
   Stage: Stage-0
     Fetch Operator
@@ -922,7 +922,7 @@
             GlobalTableId: 0
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 
   Stage: Stage-0
     Fetch Operator

Added: hadoop/hive/trunk/ql/src/test/results/clientpositive/create_insert_outputformat.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/create_insert_outputformat.q.out?rev=763879&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/create_insert_outputformat.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/create_insert_outputformat.q.out Fri Apr 10 06:34:57 2009
@@ -0,0 +1,6 @@
+key	int	
+value	string	
+key	int	
+value	string	
+key	int	
+value	string	

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1.q.out?rev=763879&r1=763878&r2=763879&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1.q.out Fri Apr 10 06:34:57 2009
@@ -36,13 +36,13 @@
             GlobalTableId: 0
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.mapred.SequenceFileOutputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                 name: binary_table
 
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        /data/users/zshao/sync/apache-trunk-HIVE-337-trunk/build/ql/tmp/383864989/212664797.10001 
+        /Users/char/Documents/workspace/Hive/build/ql/tmp/381076141/85086004.10001 
           Reduce Output Operator
             key expressions:
                   expr: 0
@@ -80,7 +80,7 @@
                 GlobalTableId: 1
                 table:
                     input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: dest_g1
 
@@ -90,7 +90,7 @@
             replace: true
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: dest_g1
 

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_limit.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_limit.q.out?rev=763879&r1=763878&r2=763879&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_limit.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_limit.q.out Fri Apr 10 06:34:57 2009
@@ -50,13 +50,13 @@
                 GlobalTableId: 0
                 table:
                     input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.mapred.SequenceFileOutputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                     name: binary_table
 
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        /data/users/zshao/sync/apache-trunk-HIVE-337-trunk/build/ql/tmp/1330446638/19184728.10001 
+        /Users/char/Documents/workspace/Hive/build/ql/tmp/759704963/795138624.10001 
           Reduce Output Operator
             sort order: 
             tag: -1
@@ -79,7 +79,7 @@
                 GlobalTableId: 1
                 table:
                     input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: dest1
 
@@ -89,7 +89,7 @@
             replace: true
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: dest1
 

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map.q.out?rev=763879&r1=763878&r2=763879&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map.q.out Fri Apr 10 06:34:57 2009
@@ -54,7 +54,7 @@
                 GlobalTableId: 1
                 table:
                     input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: dest1
 
@@ -64,7 +64,7 @@
             replace: true
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: dest1
 

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map_nomap.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map_nomap.q.out?rev=763879&r1=763878&r2=763879&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map_nomap.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map_nomap.q.out Fri Apr 10 06:34:57 2009
@@ -54,7 +54,7 @@
                 GlobalTableId: 1
                 table:
                     input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: dest1
 
@@ -64,7 +64,7 @@
             replace: true
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: dest1
 

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map_skew.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map_skew.q.out?rev=763879&r1=763878&r2=763879&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map_skew.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map_skew.q.out Fri Apr 10 06:34:57 2009
@@ -43,13 +43,13 @@
             GlobalTableId: 0
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.mapred.SequenceFileOutputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                 name: binary_table
 
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        /data/users/zshao/sync/apache-trunk-HIVE-337-trunk/build/ql/tmp/133273148/247233082.10001 
+        /Users/char/Documents/workspace/Hive/build/ql/tmp/458951137/31037752.10001 
           Reduce Output Operator
             key expressions:
                   expr: 0
@@ -87,7 +87,7 @@
                 GlobalTableId: 1
                 table:
                     input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: dest1
 
@@ -97,7 +97,7 @@
             replace: true
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: dest1
 

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_noskew.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_noskew.q.out?rev=763879&r1=763878&r2=763879&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_noskew.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_noskew.q.out Fri Apr 10 06:34:57 2009
@@ -47,7 +47,7 @@
                 GlobalTableId: 1
                 table:
                     input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: dest_g1
 
@@ -57,7 +57,7 @@
             replace: true
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 name: dest_g1