You are viewing a plain-text version of this content. (The canonical link referenced here was not preserved in this text extraction.)
Posted to commits@hive.apache.org by na...@apache.org on 2009/12/23 00:16:43 UTC

svn commit: r893343 - in /hadoop/hive/trunk: ./ ql/src/java/org/apache/hadoop/hive/ql/parse/ ql/src/test/queries/clientnegative/ ql/src/test/queries/clientpositive/ ql/src/test/results/clientnegative/ ql/src/test/results/clientpositive/

Author: namit
Date: Tue Dec 22 23:16:41 2009
New Revision: 893343

URL: http://svn.apache.org/viewvc?rev=893343&view=rev
Log:
HIVE-302 Check that lines terminated by can only be "\n"
(Zheng Shao via namit)


Added:
    hadoop/hive/trunk/ql/src/test/queries/clientnegative/line_terminator.q
    hadoop/hive/trunk/ql/src/test/results/clientnegative/line_terminator.q.out
Modified:
    hadoop/hive/trunk/CHANGES.txt
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hadoop/hive/trunk/ql/src/test/queries/clientpositive/ctas.q
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ctas.q.out

Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=893343&r1=893342&r2=893343&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Tue Dec 22 23:16:41 2009
@@ -357,6 +357,9 @@
     HIVE-927 Check the schema for both inputs of union
     (He Yongqiang via namit)
 
+    HIVE-302 Check that lines terminated by can only be "\n"
+    (Zheng Shao via namit)
+
 Release 0.4.0 -  Unreleased
 
   INCOMPATIBLE CHANGES

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java?rev=893343&r1=893342&r2=893343&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java Tue Dec 22 23:16:41 2009
@@ -100,6 +100,7 @@
   BUCKETED_NUMBERATOR_BIGGER_DENOMINATOR("Numberator should not be bigger than denaminator in sample clause for Table"),
   NEED_PARTITION_ERROR("need to specify partition columns because the destination table is partitioned."),
   CTAS_CTLT_COEXISTENCE("Create table command does not allow LIKE and AS-SELECT in the same command"),
+  LINES_TERMINATED_BY_NON_NEWLINE("LINES TERMINATED BY only supports newline '\\n' right now"),
   CTAS_COLLST_COEXISTENCE("Create table as select command cannot specify the list of columns for the target table."),
   CTLT_COLLST_COEXISTENCE("Create table like command cannot specify the list of columns for the target table."),
   INVALID_SELECT_SCHEMA("Cannot derive schema from the select-clause."),

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=893343&r1=893342&r2=893343&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Tue Dec 22 23:16:41 2009
@@ -321,7 +321,7 @@
         if (expr == null) {
           expr = value;
         } else {
-          throw new SemanticException(ErrorMsg.UNSUPPORTED_MULTIPLE_DISTINCTS.getMsg(expr));
+          throw new SemanticException(ErrorMsg.UNSUPPORTED_MULTIPLE_DISTINCTS.getMsg());
         }
       }
     }
@@ -1144,7 +1144,11 @@
           tblDesc.getProperties().setProperty(Constants.MAPKEY_DELIM, unescapeSQLString(rowChild.getChild(0).getText()));
           break;
         case HiveParser.TOK_TABLEROWFORMATLINES:
-          tblDesc.getProperties().setProperty(Constants.LINE_DELIM, unescapeSQLString(rowChild.getChild(0).getText()));
+          String lineDelim = unescapeSQLString(rowChild.getChild(0).getText());
+          tblDesc.getProperties().setProperty(Constants.LINE_DELIM, lineDelim);
+          if (!lineDelim.equals("\n") && !lineDelim.equals("10")) {
+            throw new SemanticException(ErrorMsg.LINES_TERMINATED_BY_NON_NEWLINE.getMsg());
+          }
           break;
         default: assert false;
         }
@@ -5467,6 +5471,9 @@
                 break;
               case HiveParser.TOK_TABLEROWFORMATLINES:
                 lineDelim = unescapeSQLString(rowChild.getChild(0).getText());
+                if (!lineDelim.equals("\n") && !lineDelim.equals("10")) {
+                  throw new SemanticException(ErrorMsg.LINES_TERMINATED_BY_NON_NEWLINE.getMsg());
+                }
                 break;
               default: assert false;
             }

Added: hadoop/hive/trunk/ql/src/test/queries/clientnegative/line_terminator.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientnegative/line_terminator.q?rev=893343&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientnegative/line_terminator.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientnegative/line_terminator.q Tue Dec 22 23:16:41 2009
@@ -0,0 +1,3 @@
+CREATE TABLE mytable (col1 STRING, col2 INT)
+ROW FORMAT DELIMITED
+LINES TERMINATED BY ',';

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/ctas.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/ctas.q?rev=893343&r1=893342&r2=893343&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/ctas.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/ctas.q Tue Dec 22 23:16:41 2009
@@ -41,9 +41,9 @@
 
 select * from nzhang_ctas4;
 
-explain extended create table nzhang_ctas5 row format delimited fields terminated by ',' lines terminated by '.' stored as textfile as select key, value from src sort by key, value limit 10;
+explain extended create table nzhang_ctas5 row format delimited fields terminated by ',' lines terminated by '\012' stored as textfile as select key, value from src sort by key, value limit 10;
 
-create table nzhang_ctas5 row format delimited fields terminated by ',' lines terminated by '.' stored as textfile as select key, value from src sort by key, value limit 10;
+create table nzhang_ctas5 row format delimited fields terminated by ',' lines terminated by '\012' stored as textfile as select key, value from src sort by key, value limit 10;
 
 drop table nzhang_ctas1;
 drop table nzhang_ctas2;

Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/line_terminator.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/line_terminator.q.out?rev=893343&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/line_terminator.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/line_terminator.q.out Tue Dec 22 23:16:41 2009
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: LINES TERMINATED BY only supports newline '\n' right now

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/ctas.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/ctas.q.out?rev=893343&r1=893342&r2=893343&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/ctas.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/ctas.q.out Tue Dec 22 23:16:41 2009
@@ -26,11 +26,11 @@
 PREHOOK: query: select * from nzhang_Tmp
 PREHOOK: type: QUERY
 PREHOOK: Input: default@nzhang_tmp
-PREHOOK: Output: file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/366288370/10000
+PREHOOK: Output: file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/tmp/1149102583/10000
 POSTHOOK: query: select * from nzhang_Tmp
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@nzhang_tmp
-POSTHOOK: Output: file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/366288370/10000
+POSTHOOK: Output: file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/tmp/1149102583/10000
 PREHOOK: query: explain create table nzhang_CTAS1 as select key k, value from src sort by k, value limit 10
 PREHOOK: type: CREATETABLE
 POSTHOOK: query: explain create table nzhang_CTAS1 as select key k, value from src sort by k, value limit 10
@@ -84,7 +84,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/1667899697/10002 
+        file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/tmp/317200131/10002 
             Reduce Output Operator
               key expressions:
                     expr: _col0
@@ -112,7 +112,7 @@
     Move Operator
       files:
           hdfs directory: true
-          destination: file:///data/users/njain/hive_commit1/hive_commit1/ql/../build/ql/test/data/warehouse/nzhang_ctas1
+          destination: file:///data/users/zshao/tools/deploy-trunk-apache-hive/ql/../build/ql/test/data/warehouse/nzhang_ctas1
 
   Stage: Stage-3
       Create Table Operator:
@@ -136,11 +136,11 @@
 PREHOOK: query: select * from nzhang_CTAS1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@nzhang_ctas1
-PREHOOK: Output: file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/393574144/10000
+PREHOOK: Output: file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/tmp/1963223053/10000
 POSTHOOK: query: select * from nzhang_CTAS1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@nzhang_ctas1
-POSTHOOK: Output: file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/393574144/10000
+POSTHOOK: Output: file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/tmp/1963223053/10000
 0	val_0
 0	val_0
 0	val_0
@@ -204,7 +204,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/817459050/10002 
+        file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/tmp/88035336/10002 
             Reduce Output Operator
               key expressions:
                     expr: _col0
@@ -232,7 +232,7 @@
     Move Operator
       files:
           hdfs directory: true
-          destination: file:///data/users/njain/hive_commit1/hive_commit1/ql/../build/ql/test/data/warehouse/nzhang_ctas2
+          destination: file:///data/users/zshao/tools/deploy-trunk-apache-hive/ql/../build/ql/test/data/warehouse/nzhang_ctas2
 
   Stage: Stage-3
       Create Table Operator:
@@ -256,11 +256,11 @@
 PREHOOK: query: select * from nzhang_ctas2
 PREHOOK: type: QUERY
 PREHOOK: Input: default@nzhang_ctas2
-PREHOOK: Output: file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/1000701348/10000
+PREHOOK: Output: file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/tmp/16284527/10000
 POSTHOOK: query: select * from nzhang_ctas2
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@nzhang_ctas2
-POSTHOOK: Output: file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/1000701348/10000
+POSTHOOK: Output: file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/tmp/16284527/10000
 0	val_0
 0	val_0
 0	val_0
@@ -324,7 +324,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/2111016297/10002 
+        file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/tmp/1798106048/10002 
             Reduce Output Operator
               key expressions:
                     expr: _col0
@@ -352,7 +352,7 @@
     Move Operator
       files:
           hdfs directory: true
-          destination: file:///data/users/njain/hive_commit1/hive_commit1/ql/../build/ql/test/data/warehouse/nzhang_ctas3
+          destination: file:///data/users/zshao/tools/deploy-trunk-apache-hive/ql/../build/ql/test/data/warehouse/nzhang_ctas3
 
   Stage: Stage-3
       Create Table Operator:
@@ -377,11 +377,11 @@
 PREHOOK: query: select * from nzhang_ctas3
 PREHOOK: type: QUERY
 PREHOOK: Input: default@nzhang_ctas3
-PREHOOK: Output: file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/1714777571/10000
+PREHOOK: Output: file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/tmp/1146556976/10000
 POSTHOOK: query: select * from nzhang_ctas3
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@nzhang_ctas3
-POSTHOOK: Output: file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/1714777571/10000
+POSTHOOK: Output: file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/tmp/1146556976/10000
 0.0	val_0_con
 0.0	val_0_con
 0.0	val_0_con
@@ -410,11 +410,11 @@
 PREHOOK: query: select * from nzhang_ctas3
 PREHOOK: type: QUERY
 PREHOOK: Input: default@nzhang_ctas3
-PREHOOK: Output: file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/1253058565/10000
+PREHOOK: Output: file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/tmp/424511027/10000
 POSTHOOK: query: select * from nzhang_ctas3
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@nzhang_ctas3
-POSTHOOK: Output: file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/1253058565/10000
+POSTHOOK: Output: file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/tmp/424511027/10000
 0.0	val_0_con
 0.0	val_0_con
 0.0	val_0_con
@@ -478,7 +478,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/1165105243/10002 
+        file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/tmp/257132629/10002 
             Reduce Output Operator
               key expressions:
                     expr: _col0
@@ -506,7 +506,7 @@
     Move Operator
       files:
           hdfs directory: true
-          destination: file:///data/users/njain/hive_commit1/hive_commit1/ql/../build/ql/test/data/warehouse/nzhang_ctas4
+          destination: file:///data/users/zshao/tools/deploy-trunk-apache-hive/ql/../build/ql/test/data/warehouse/nzhang_ctas4
 
   Stage: Stage-3
       Create Table Operator:
@@ -531,11 +531,11 @@
 PREHOOK: query: select * from nzhang_ctas4
 PREHOOK: type: QUERY
 PREHOOK: Input: default@nzhang_ctas4
-PREHOOK: Output: file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/470106969/10000
+PREHOOK: Output: file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/tmp/466331584/10000
 POSTHOOK: query: select * from nzhang_ctas4
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@nzhang_ctas4
-POSTHOOK: Output: file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/470106969/10000
+POSTHOOK: Output: file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/tmp/466331584/10000
 0	val_0
 0	val_0
 0	val_0
@@ -546,12 +546,12 @@
 103	val_103
 104	val_104
 104	val_104
-PREHOOK: query: explain extended create table nzhang_ctas5 row format delimited fields terminated by ',' lines terminated by '.' stored as textfile as select key, value from src sort by key, value limit 10
+PREHOOK: query: explain extended create table nzhang_ctas5 row format delimited fields terminated by ',' lines terminated by '\012' stored as textfile as select key, value from src sort by key, value limit 10
 PREHOOK: type: CREATETABLE
-POSTHOOK: query: explain extended create table nzhang_ctas5 row format delimited fields terminated by ',' lines terminated by '.' stored as textfile as select key, value from src sort by key, value limit 10
+POSTHOOK: query: explain extended create table nzhang_ctas5 row format delimited fields terminated by ',' lines terminated by '\012' stored as textfile as select key, value from src sort by key, value limit 10
 POSTHOOK: type: CREATETABLE
 ABSTRACT SYNTAX TREE:
-  (TOK_CREATETABLE nzhang_ctas5 TOK_LIKETABLE (TOK_TABLEROWFORMAT (TOK_SERDEPROPS (TOK_TABLEROWFORMATFIELD ',') (TOK_TABLEROWFORMATLINES '.'))) TOK_TBLTEXTFILE (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value))) (TOK_LIMIT 10))))
+  (TOK_CREATETABLE nzhang_ctas5 TOK_LIKETABLE (TOK_TABLEROWFORMAT (TOK_SERDEPROPS (TOK_TABLEROWFORMATFIELD ',') (TOK_TABLEROWFORMATLINES '\012'))) TOK_TBLTEXTFILE (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value))) (TOK_LIMIT 10))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -588,9 +588,9 @@
                       type: string
       Needs Tagging: false
       Path -> Alias:
-        file:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/src [src]
+        file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/test/data/warehouse/src [src]
       Path -> Partition:
-        file:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/src 
+        file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/test/data/warehouse/src 
           Partition
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -604,7 +604,8 @@
               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               file.inputformat org.apache.hadoop.mapred.TextInputFormat
               file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              location file:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/src
+              location file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/test/data/warehouse/src
+              transient_lastDdlTime 1261508605
             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           
               input format: org.apache.hadoop.mapred.TextInputFormat
@@ -619,8 +620,8 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/njain/hive_commit1/hive_commit1/build/ql/test/data/warehouse/src
-                transient_lastDdlTime 1258053540
+                location file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/test/data/warehouse/src
+                transient_lastDdlTime 1261508605
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: src
             name: src
@@ -630,7 +631,7 @@
             File Output Operator
               compressed: false
               GlobalTableId: 0
-              directory: file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/764617234/10002
+              directory: file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/tmp/603500118/10002
               table:
                   input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -642,7 +643,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/764617234/10002 
+        file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/tmp/603500118/10002 
             Reduce Output Operator
               key expressions:
                     expr: _col0
@@ -658,9 +659,9 @@
                     type: string
       Needs Tagging: false
       Path -> Alias:
-        file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/764617234/10002 [file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/764617234/10002]
+        file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/tmp/603500118/10002 [file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/tmp/603500118/10002]
       Path -> Partition:
-        file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/764617234/10002 
+        file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/tmp/603500118/10002 
           Partition
             input format: org.apache.hadoop.mapred.SequenceFileInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -681,13 +682,14 @@
             File Output Operator
               compressed: false
               GlobalTableId: 0
-              directory: file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/764617234/10001
+              directory: file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/tmp/603500118/10001
               table:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
                     field.delim ,
-                    line.delim .
+                    line.delim 
+
                     columns _col0,_col1
                     serialization.format ,
                     columns.types string:string
@@ -696,8 +698,8 @@
     Move Operator
       files:
           hdfs directory: true
-          source: file:/data/users/njain/hive_commit1/hive_commit1/build/ql/tmp/764617234/10001
-          destination: file:///data/users/njain/hive_commit1/hive_commit1/ql/../build/ql/test/data/warehouse/nzhang_ctas5
+          source: file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/tmp/603500118/10001
+          destination: file:///data/users/zshao/tools/deploy-trunk-apache-hive/ql/../build/ql/test/data/warehouse/nzhang_ctas5
 
   Stage: Stage-3
       Create Table Operator:
@@ -706,17 +708,18 @@
           field delimiter: ,
           if not exists: false
           input format: org.apache.hadoop.mapred.TextInputFormat
-          line delimiter: .
+          line delimiter: 
+
           # buckets: -1
           output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
           name: nzhang_ctas5
           isExternal: false
 
 
-PREHOOK: query: create table nzhang_ctas5 row format delimited fields terminated by ',' lines terminated by '.' stored as textfile as select key, value from src sort by key, value limit 10
+PREHOOK: query: create table nzhang_ctas5 row format delimited fields terminated by ',' lines terminated by '\012' stored as textfile as select key, value from src sort by key, value limit 10
 PREHOOK: type: CREATETABLE
 PREHOOK: Input: default@src
-POSTHOOK: query: create table nzhang_ctas5 row format delimited fields terminated by ',' lines terminated by '.' stored as textfile as select key, value from src sort by key, value limit 10
+POSTHOOK: query: create table nzhang_ctas5 row format delimited fields terminated by ',' lines terminated by '\012' stored as textfile as select key, value from src sort by key, value limit 10
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@nzhang_ctas5