You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by na...@apache.org on 2009/09/30 07:17:17 UTC

svn commit: r820179 [1/2] - in /hadoop/hive/branches/branch-0.4: ./ common/src/java/org/apache/hadoop/hive/conf/ conf/ eclipse-templates/ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hive/ql/parse/ ql/src/java/org/apache/ha...

Author: namit
Date: Wed Sep 30 05:17:14 2009
New Revision: 820179

URL: http://svn.apache.org/viewvc?rev=820179&view=rev
Log:

HIVE-857. Transform script should support objects of same type. (Zheng Shao via namit)


Added:
    hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/exec/RecordReader.java
    hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/exec/TextRecordReader.java
    hadoop/hive/branches/branch-0.4/ql/src/test/queries/clientpositive/input38.q
    hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input38.q.out
Modified:
    hadoop/hive/branches/branch-0.4/CHANGES.txt
    hadoop/hive/branches/branch-0.4/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
    hadoop/hive/branches/branch-0.4/conf/hive-default.xml
    hadoop/hive/branches/branch-0.4/eclipse-templates/.classpath
    hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java
    hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
    hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
    hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/plan/scriptDesc.java
    hadoop/hive/branches/branch-0.4/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
    hadoop/hive/branches/branch-0.4/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
    hadoop/hive/branches/branch-0.4/ql/src/test/results/clientnegative/script_error.q.out
    hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input14.q.out
    hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input14_limit.q.out
    hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input17.q.out
    hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input18.q.out
    hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input20.q.out
    hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input33.q.out
    hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input34.q.out
    hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input35.q.out
    hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input36.q.out
    hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input5.q.out
    hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/mapreduce1.q.out
    hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/mapreduce2.q.out
    hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/mapreduce3.q.out
    hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/mapreduce4.q.out
    hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/mapreduce7.q.out
    hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/mapreduce8.q.out
    hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/nullscript.q.out
    hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/ppd_transform.q.out
    hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/regexp_extract.q.out
    hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/transform_ppr1.q.out
    hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/transform_ppr2.q.out
    hadoop/hive/branches/branch-0.4/ql/src/test/results/compiler/parse/input20.q.out
    hadoop/hive/branches/branch-0.4/ql/src/test/results/compiler/parse/input4.q.out
    hadoop/hive/branches/branch-0.4/ql/src/test/results/compiler/parse/input5.q.out
    hadoop/hive/branches/branch-0.4/ql/src/test/results/compiler/plan/input20.q.xml
    hadoop/hive/branches/branch-0.4/ql/src/test/results/compiler/plan/input4.q.xml
    hadoop/hive/branches/branch-0.4/ql/src/test/results/compiler/plan/input5.q.xml
    hadoop/hive/branches/branch-0.4/serde/if/serde.thrift
    hadoop/hive/branches/branch-0.4/serde/src/gen-java/org/apache/hadoop/hive/serde/Constants.java
    hadoop/hive/branches/branch-0.4/serde/src/gen-php/serde_constants.php
    hadoop/hive/branches/branch-0.4/serde/src/gen-py/org_apache_hadoop_hive_serde/constants.py
    hadoop/hive/branches/branch-0.4/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/InputByteBuffer.java
    hadoop/hive/branches/branch-0.4/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/OutputByteBuffer.java
    hadoop/hive/branches/branch-0.4/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java

Modified: hadoop/hive/branches/branch-0.4/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/CHANGES.txt?rev=820179&r1=820178&r2=820179&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/CHANGES.txt (original)
+++ hadoop/hive/branches/branch-0.4/CHANGES.txt Wed Sep 30 05:17:14 2009
@@ -122,7 +122,7 @@
 
     HIVE-691. show documentation for functions. (Emil Ibrishimov via namit)
 
-    HIVE-743. Let user specify serde for custom sctipts.   
+    HIVE-743. Let user specify serde for custom sctipts.
     (Namit Jain via rmurthy)
 
     HIVE-749. add hive.optimize.pruner
@@ -132,6 +132,8 @@
 
     HIVE-187. Preliminary ODBC Support. (Eric Hwang via rmurthy)
 
+    HIVE-857. Transform script should support objects of same type. (Zheng Shao via namit)
+
   IMPROVEMENTS
     HIVE-389. Option to build without ivy (jssarma)
 
@@ -217,7 +219,7 @@
     HIVE-737. Support having hadoop jars in HADOOP_HOME/build for running
     hive cli. (Johan Oskarsson via athusoo)
 
-    HIVE-760. Add version info to META-INF/MANIFEST.MF 
+    HIVE-760. Add version info to META-INF/MANIFEST.MF
     (Bill Graham via rmurthy)
 
     HIVE-679. Adding the basic JDBC methods to allow querying using the

Modified: hadoop/hive/branches/branch-0.4/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=820179&r1=820178&r2=820179&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hadoop/hive/branches/branch-0.4/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Wed Sep 30 05:17:14 2009
@@ -131,6 +131,10 @@
     //Location of Hive run time structured log file
     HIVEHISTORYFILELOC("hive.querylog.location",  "/tmp/"+System.getProperty("user.name")),
     
+    // Default serde and record reader for user scripts
+    HIVESCRIPTSERDE("hive.script.serde", "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"),
+    HIVESCRIPTRECORDREADER("hive.script.recordreader", "org.apache.hadoop.hive.ql.exec.TextRecordReader"),
+    
     // HWI
     HIVEHWILISTENHOST("hive.hwi.listen.host","0.0.0.0"),
     HIVEHWILISTENPORT("hive.hwi.listen.port","9999"),

Modified: hadoop/hive/branches/branch-0.4/conf/hive-default.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/conf/hive-default.xml?rev=820179&r1=820178&r2=820179&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/conf/hive-default.xml (original)
+++ hadoop/hive/branches/branch-0.4/conf/hive-default.xml Wed Sep 30 05:17:14 2009
@@ -339,4 +339,16 @@
  <description>Whether Hive Transform/Map/Reduce Clause should automatically send progress information to TaskTracker to avoid the task getting killed because of inactivity.  Hive sends progress information when the script is outputting to stderr.  This option removes the need of periodically producing stderr messages, but users should be cautious because this may prevent infinite loops in the scripts to be killed by TaskTracker.  </description>
 </property>
 
+<property>
+  <name>hive.script.serde</name>
+  <value>org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe</value>
+  <description>The default serde for transmitting input data to and reading output data from the user scripts. </description>
+</property>
+
+<property>
+  <name>hive.script.recordreader</name>
+  <value>org.apache.hadoop.hive.ql.exec.TextRecordReader</value>
+  <description>The default record reader for reading data from the user scripts. </description>
+</property>
+
 </configuration>

Modified: hadoop/hive/branches/branch-0.4/eclipse-templates/.classpath
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/eclipse-templates/.classpath?rev=820179&r1=820178&r2=820179&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/eclipse-templates/.classpath (original)
+++ hadoop/hive/branches/branch-0.4/eclipse-templates/.classpath Wed Sep 30 05:17:14 2009
@@ -25,6 +25,7 @@
 	<classpathentry exported="true" kind="lib" path="ql/lib/antlr-runtime-3.0.1.jar"/>
 	<classpathentry exported="true" kind="lib" path="testlibs/junit-3.8.1.jar"/>
 	<classpathentry kind="src" path="build/ql/gen-java"/>
+	<classpathentry kind="src" path="build/contrib/test/src"/>
 	<classpathentry kind="src" path="build/ql/test/src"/>
 	<classpathentry kind="src" path="cli/src/java"/>
 	<classpathentry kind="src" path="common/src/java"/>

Added: hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/exec/RecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/exec/RecordReader.java?rev=820179&view=auto
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/exec/RecordReader.java (added)
+++ hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/exec/RecordReader.java Wed Sep 30 05:17:14 2009
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.Writable;
+
+
+public interface RecordReader {
+
+  public void initialize(InputStream in, Configuration conf) throws IOException;
+
+  public Writable createRow() throws IOException;
+
+  public int next(Writable row) throws IOException;
+  
+  public void close() throws IOException;
+}

Modified: hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java?rev=820179&r1=820178&r2=820179&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java (original)
+++ hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java Wed Sep 30 05:17:14 2009
@@ -41,10 +41,11 @@
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.Serializer;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapred.Reporter;
-import org.apache.hadoop.mapred.LineRecordReader.LineReader;
 import org.apache.hadoop.util.StringUtils;
 
 
@@ -186,7 +187,7 @@
 
       scriptOutputDeserializer = conf.getScriptOutputInfo().getDeserializerClass().newInstance();
       scriptOutputDeserializer.initialize(hconf, conf.getScriptOutputInfo().getProperties());
-
+      
       scriptInputSerializer = (Serializer)conf.getScriptInputInfo().getDeserializerClass().newInstance();
       scriptInputSerializer.initialize(hconf, conf.getScriptInputInfo().getProperties());
 
@@ -222,9 +223,17 @@
       scriptOut = new DataOutputStream(new BufferedOutputStream(scriptPid.getOutputStream()));
       scriptIn = new DataInputStream(new BufferedInputStream(scriptPid.getInputStream()));
       scriptErr = new DataInputStream(new BufferedInputStream(scriptPid.getErrorStream()));
-      outThread = new StreamThread(scriptIn, new OutputStreamProcessor(
+      
+      RecordReader scriptOutputReader = conf.getOutRecordReaderClass().newInstance();
+      scriptOutputReader.initialize(scriptIn, hconf);
+      
+      outThread = new StreamThread(scriptOutputReader, new OutputStreamProcessor(
           scriptOutputDeserializer.getObjectInspector()), "OutputProcessor");
-      errThread = new StreamThread(scriptErr,
+      
+      RecordReader scriptErrReader = conf.getOutRecordReaderClass().newInstance();
+      scriptErrReader.initialize(scriptErr, hconf);
+      
+      errThread = new StreamThread(scriptErrReader,
                                    new ErrorStreamProcessor
                                    (HiveConf.getIntVar(hconf, HiveConf.ConfVars.SCRIPTERRORLIMIT)),
                                    "ErrorProcessor");
@@ -318,7 +327,7 @@
 
 
   interface StreamProcessor {
-    public void processLine(Text line) throws HiveException;
+    public void processLine(Writable line) throws HiveException;
     public void close() throws HiveException;
   }
 
@@ -329,7 +338,7 @@
     public OutputStreamProcessor(ObjectInspector rowInspector) {
       this.rowInspector = rowInspector;
     }
-    public void processLine(Text line) throws HiveException {
+    public void processLine(Writable line) throws HiveException {
       try {
         row = scriptOutputDeserializer.deserialize(line);
       } catch (SerDeException e) {
@@ -360,10 +369,16 @@
       lastReportTime = 0;
     }
     
-    public void processLine(Text line) throws HiveException {
+    public void processLine(Writable line) throws HiveException {
       
       String stringLine = line.toString();
+      int len = 0;
       
+      if (line instanceof Text) 
+        len = ((Text)line).getLength();
+      else if (line instanceof BytesWritable)
+        len = ((BytesWritable)line).getSize();
+          
       // Report progress for each stderr line, but no more frequently than once per minute.
       long now = System.currentTimeMillis();
       // reporter is a member variable of the Operator class.
@@ -375,11 +390,11 @@
       if((maxBytes < 0) || (bytesCopied < maxBytes)) {
         System.err.println(stringLine);
       }
-      if (bytesCopied < maxBytes && bytesCopied + line.getLength() >= maxBytes) {
+      if (bytesCopied < maxBytes && bytesCopied + len >= maxBytes) {
         System.err.println("Operator " + id + " " + getName()
             + ": exceeding stderr limit of " + maxBytes + " bytes, will truncate stderr messages.");
       }      
-      bytesCopied += line.getLength();
+      bytesCopied += len;
     }
     public void close() {
     }
@@ -389,11 +404,11 @@
 
   class StreamThread extends Thread {
 
-    InputStream in;
+    RecordReader in;
     StreamProcessor proc;
     String name;
 
-    StreamThread(InputStream in, StreamProcessor proc, String name) {
+    StreamThread(RecordReader in, StreamProcessor proc, String name) {
       this.in = in;
       this.proc = proc;
       this.name = name;
@@ -401,14 +416,11 @@
     }
 
     public void run() {
-      LineReader lineReader = null;
       try {
-        Text row = new Text();
-        lineReader = new LineReader((InputStream)in, hconf);
+        Writable row = in.createRow();
 
         while(true) {
-          row.clear();
-          long bytes = lineReader.readLine(row);
+          long bytes = in.next(row);
           if(bytes <= 0) {
             break;
           }
@@ -421,10 +433,9 @@
         LOG.warn(StringUtils.stringifyException(th));
       } finally {
         try {
-          if(lineReader != null) {
-            lineReader.close();
+          if (in != null) {
+            in.close();
           }
-          in.close();
           proc.close();
         } catch (Exception e) {
           LOG.warn(name + ": error in closing ..");

Added: hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/exec/TextRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/exec/TextRecordReader.java?rev=820179&view=auto
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/exec/TextRecordReader.java (added)
+++ hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/exec/TextRecordReader.java Wed Sep 30 05:17:14 2009
@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+import org.apache.hadoop.mapred.LineRecordReader.LineReader;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.Writable;
+
+
+public class TextRecordReader implements RecordReader {
+
+  private LineReader  lineReader;
+  private InputStream in;
+  private Text        row;
+
+  public void initialize(InputStream in, Configuration conf) throws IOException {
+    lineReader = new LineReader(in, conf);
+    this.in = in;
+  }
+
+  public Writable createRow() throws IOException {
+    row = new Text();
+    return row;
+  }
+
+  public int next(Writable row) throws IOException {
+    if (lineReader == null)
+      return -1;
+
+    return lineReader.readLine((Text)row);
+  }
+  
+  public void close() throws IOException {
+    if (in != null)
+      in.close();
+  }
+}

Modified: hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g?rev=820179&r1=820178&r2=820179&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (original)
+++ hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g Wed Sep 30 05:17:14 2009
@@ -137,6 +137,7 @@
 TOK_HINTARGLIST;
 TOK_USERSCRIPTCOLNAMES;
 TOK_USERSCRIPTCOLSCHEMA;
+TOK_RECORDREADER;
 }
 
 
@@ -371,6 +372,14 @@
 @after { msgs.pop(); }
     : serdeFormat -> ^(TOK_SERDE serdeFormat)
     | serdePropertiesFormat -> ^(TOK_SERDE serdePropertiesFormat)
+    |   -> ^(TOK_SERDE)
+    ;
+
+recordReader
+@init { msgs.push("record reader specification"); }
+@after { msgs.pop(); }
+    : KW_RECORDREADER StringLiteral -> ^(TOK_RECORDREADER StringLiteral)
+    |   -> ^(TOK_RECORDREADER)
     ;
 
 serdeFormat
@@ -730,9 +739,11 @@
     ( KW_SELECT KW_TRANSFORM LPAREN selectExpressionList RPAREN
       | KW_MAP    selectExpressionList
       | KW_REDUCE selectExpressionList )
-    inSerde=serde? KW_USING StringLiteral 
-    ( KW_AS ((LPAREN (aliasList | columnNameTypeList) RPAREN) | (aliasList | columnNameTypeList)) outSerde=serde?)?
-    -> ^(TOK_TRANSFORM selectExpressionList $inSerde? StringLiteral aliasList? columnNameTypeList? $outSerde?)
+    inSerde=serde 
+    KW_USING StringLiteral 
+    ( KW_AS ((LPAREN (aliasList | columnNameTypeList) RPAREN) | (aliasList | columnNameTypeList)))? 
+    outSerde=serde outRec=recordReader
+    -> ^(TOK_TRANSFORM selectExpressionList $inSerde StringLiteral $outSerde $outRec aliasList? columnNameTypeList?)
     ;
     
 selectExpression
@@ -1307,6 +1318,7 @@
 KW_CONTINUE: 'CONTINUE';
 KW_CURSOR: 'CURSOR';
 KW_TRIGGER: 'TRIGGER';
+KW_RECORDREADER: 'RECORDREADER';
 
 
 // Operators

Modified: hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=820179&r1=820178&r2=820179&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Wed Sep 30 05:17:14 2009
@@ -48,6 +48,7 @@
 import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.OperatorFactory;
+import org.apache.hadoop.hive.ql.exec.RecordReader;
 import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
 import org.apache.hadoop.hive.ql.exec.RowSchema;
 import org.apache.hadoop.hive.ql.exec.Task;
@@ -127,6 +128,8 @@
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.common.JavaUtils;
+import org.apache.hadoop.hive.ql.exec.TextRecordReader;
 
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
@@ -1111,18 +1114,18 @@
     return cmd;
   }
 
-  private tableDesc getTableDescFromSerDe(ASTNode child, String cols, boolean defaultCols) throws SemanticException {
+  private tableDesc getTableDescFromSerDe(ASTNode child, String cols, String colTypes, boolean defaultCols) throws SemanticException {
     if (child.getType() == HiveParser.TOK_SERDENAME) {
       String serdeName = unescapeSQLString(child.getChild(0).getText());
       Class<? extends Deserializer> serdeClass = null;
       
       try {
-        serdeClass = (Class<? extends Deserializer>)Class.forName(serdeName);
+        serdeClass = (Class<? extends Deserializer>)Class.forName(serdeName, true, JavaUtils.getClassLoader());
       } catch (ClassNotFoundException e) {
         throw new SemanticException(e);
       }
       
-      tableDesc tblDesc = PlanUtils.getTableDesc(serdeClass, Integer.toString(Utilities.tabCode), cols, defaultCols);
+      tableDesc tblDesc = PlanUtils.getTableDesc(serdeClass, Integer.toString(Utilities.tabCode), cols, colTypes, defaultCols, true);
       // copy all the properties
       if (child.getChildCount() == 2) {
         ASTNode prop = (ASTNode)((ASTNode)child.getChild(1)).getChild(0);
@@ -1135,7 +1138,7 @@
       return tblDesc;
     }
     else if (child.getType() == HiveParser.TOK_SERDEPROPS) {
-      tableDesc tblDesc = PlanUtils.getDefaultTableDesc(Integer.toString(Utilities.ctrlaCode), cols, defaultCols);
+      tableDesc tblDesc = PlanUtils.getDefaultTableDesc(Integer.toString(Utilities.ctrlaCode), cols, colTypes, defaultCols);
       int numChildRowFormat = child.getChildCount();
       for (int numC = 0; numC < numChildRowFormat; numC++)
       {
@@ -1170,112 +1173,136 @@
     // should never come here
     return null;
   }
-
+  
   @SuppressWarnings("nls")
   private Operator genScriptPlan(ASTNode trfm, QB qb,
       Operator input) throws SemanticException {
     // If there is no "AS" clause, the output schema will be "key,value"
     ArrayList<ColumnInfo> outputCols = new ArrayList<ColumnInfo>();
-    boolean defaultOutputColList = true;
-    int     inputSerDeChildNum = -1, outputSerDeChildNum = -1;
-    int     outputColumnNamesPos = -1, outputColumnSchemaPos = -1;
-    int     execPos = 1;
-
+    int     inputSerDeNum  = 1;
+    int     outputSerDeNum = 3, outputRecordReaderNum = 4;
+    int     outputColsNum = 5;
+    boolean outputColNames = false, outputColSchemas = false;
+    int     execPos = 2;
+    boolean defaultOutputCols = false;
+    
     // Go over all the children
-    for (int pos = 0; pos < trfm.getChildCount(); pos++) {
-      ASTNode child = (ASTNode)trfm.getChild(pos);
-      if (child.getType() == HiveParser.TOK_ALIASLIST) {
-        defaultOutputColList = false;
-        outputColumnNamesPos = pos;
-        break;
-      }
-      else if (child.getType() == HiveParser.TOK_TABCOLLIST) {
-        defaultOutputColList = false;
-        outputColumnSchemaPos = pos;
-        break;
-      }
+    if (trfm.getChildCount() > outputColsNum) {
+      ASTNode outCols = (ASTNode)trfm.getChild(outputColsNum);
+      if (outCols.getType() == HiveParser.TOK_ALIASLIST) 
+        outputColNames = true;
+      else if (outCols.getType() == HiveParser.TOK_TABCOLLIST) 
+        outputColSchemas = true;
     }
 
-    // input serde specified
-    if ((trfm.getChildCount() >  1) && 
-        (trfm.getChild(1).getType() == HiveParser.TOK_SERDE)) {
-      inputSerDeChildNum  = 1;
-      execPos++;
-    }
-
-    // output serde specified
-    int checkChildNum = -1;
-    if (outputColumnNamesPos >= 0)
-      checkChildNum = outputColumnNamesPos + 1;
-    else if (outputColumnSchemaPos >= 0)
-      checkChildNum = outputColumnSchemaPos + 1;
-    
-    if (checkChildNum >= 0) {
-      if ((trfm.getChildCount() > (checkChildNum))
-          && (trfm.getChild(checkChildNum).getType() == HiveParser.TOK_SERDE))
-        outputSerDeChildNum  = checkChildNum;
-    }
-    
     // If column type is not specified, use a string
-    if (defaultOutputColList) {
+    if (!outputColNames && !outputColSchemas) {
       outputCols.add(new ColumnInfo("key", TypeInfoFactory.stringTypeInfo, null, false));
       outputCols.add(new ColumnInfo("value", TypeInfoFactory.stringTypeInfo, null, false));
+      defaultOutputCols = true;
     } 
-    else if (outputColumnNamesPos >= 0) {
-      ASTNode collist = (ASTNode) trfm.getChild(outputColumnNamesPos);
-      int ccount = collist.getChildCount();
-      for (int i=0; i < ccount; ++i) {
-        outputCols.add(new ColumnInfo(unescapeIdentifier(((ASTNode)collist.getChild(i)).getText()), TypeInfoFactory.stringTypeInfo, null, false));
-      }
-    }
     else {
-      assert outputColumnSchemaPos >= 0;
-      ASTNode collist = (ASTNode) trfm.getChild(outputColumnSchemaPos);
+      ASTNode collist = (ASTNode) trfm.getChild(outputColsNum);
       int ccount = collist.getChildCount();
-      for (int i=0; i < ccount; ++i) {
-        ASTNode child = (ASTNode) collist.getChild(i);
-        assert child.getType() == HiveParser.TOK_TABCOL;  
-        outputCols.add(new ColumnInfo(unescapeIdentifier(((ASTNode)child.getChild(0)).getText()), 
-                                      TypeInfoUtils.getTypeInfoFromTypeString(DDLSemanticAnalyzer.getTypeName(((ASTNode)child.getChild(1)).getType())), null, false));
+      
+      if (outputColNames) {
+        for (int i=0; i < ccount; ++i) {
+          outputCols.add(new ColumnInfo(unescapeIdentifier(((ASTNode)collist.getChild(i)).getText()), TypeInfoFactory.stringTypeInfo, null, false));
+        }
+      }
+      else {
+        for (int i=0; i < ccount; ++i) {
+          ASTNode child = (ASTNode) collist.getChild(i);
+          assert child.getType() == HiveParser.TOK_TABCOL;  
+          outputCols.add(new ColumnInfo(unescapeIdentifier(((ASTNode)child.getChild(0)).getText()), 
+                                        TypeInfoUtils.getTypeInfoFromTypeString(DDLSemanticAnalyzer.getTypeName(((ASTNode)child.getChild(1)).getType())), null, false));
+        }
       }
     }
-
+    
     RowResolver out_rwsch = new RowResolver();
     StringBuilder columns = new StringBuilder();
+    StringBuilder columnTypes = new StringBuilder();
+    
     for (int i = 0; i < outputCols.size(); ++i) {
       if (i != 0) {
         columns.append(",");
+        columnTypes.append(",");
       }
+      
       columns.append(outputCols.get(i).getInternalName());
+      columnTypes.append(outputCols.get(i).getType().getTypeName());
+      
       out_rwsch.put(
         qb.getParseInfo().getAlias(),
         outputCols.get(i).getInternalName(),
         outputCols.get(i));
     }
 
+    StringBuilder inpColumns = new StringBuilder();
+    StringBuilder inpColumnTypes = new StringBuilder();
+    Vector<ColumnInfo> inputSchema = opParseCtx.get(input).getRR().getColumnInfos();
+    for (int i = 0; i < inputSchema.size(); ++i) {
+      if (i != 0) {
+        inpColumns.append(",");
+        inpColumnTypes.append(",");
+      }
+      
+      inpColumns.append(inputSchema.get(i).getInternalName());
+      inpColumnTypes.append(inputSchema.get(i).getType().getTypeName());        
+    }
+    
     tableDesc outInfo;
     tableDesc inInfo;
+    String    defaultSerdeName =  conf.getVar(HiveConf.ConfVars.HIVESCRIPTSERDE);
+    Class<? extends Deserializer> serde;
 
-    if (inputSerDeChildNum < 0)
-      inInfo = PlanUtils.getDefaultTableDesc(Integer.toString(Utilities.tabCode), "");
-    else 
-      inInfo = getTableDescFromSerDe((ASTNode)(((ASTNode)trfm.getChild(inputSerDeChildNum))).getChild(0), "", false);
+    try {
+      serde = (Class<? extends Deserializer>)Class.forName(defaultSerdeName, true, JavaUtils.getClassLoader());
+    } catch (ClassNotFoundException e) {
+      throw new SemanticException(e);
+    }
 
-    if (outputSerDeChildNum < 0)
-      outInfo = PlanUtils.getDefaultTableDesc(Integer.toString(Utilities.tabCode), columns.toString(), defaultOutputColList);
+    // Input and Output Serdes
+    if (trfm.getChild(inputSerDeNum).getChildCount() > 0)
+      inInfo = getTableDescFromSerDe((ASTNode)(((ASTNode)trfm.getChild(inputSerDeNum))).getChild(0), inpColumns.toString(), inpColumnTypes.toString(), false);
     else 
-      outInfo = getTableDescFromSerDe((ASTNode)(((ASTNode)trfm.getChild(outputSerDeChildNum))).getChild(0), columns.toString(), defaultOutputColList);
+      inInfo = PlanUtils.getTableDesc(serde, Integer.toString(Utilities.tabCode), inpColumns.toString(), inpColumnTypes.toString(), false, true);
 
+    if (trfm.getChild(inputSerDeNum).getChildCount() > 0)
+      outInfo = getTableDescFromSerDe((ASTNode)(((ASTNode)trfm.getChild(outputSerDeNum))).getChild(0), columns.toString(), columnTypes.toString(), false);
+    // This is for backward compatibility. If the user did not specify the output column list, we assume that there are 2 columns: key and value.
+    // However, if the script outputs: col1, col2, col3 separated by TAB, the requirement is: key is col1 and value is (col2 TAB col3)
+    else
+      outInfo = PlanUtils.getTableDesc(serde, Integer.toString(Utilities.tabCode), columns.toString(), columnTypes.toString(), defaultOutputCols);
+
+    // Output record readers
+    Class <? extends RecordReader> outRecordReader = getRecordReader((ASTNode)trfm.getChild(outputRecordReaderNum));
+    
     Operator output = putOpInsertMap(OperatorFactory
             .getAndMakeChild(
-                new scriptDesc(
-                               getFixedCmd(stripQuotes(trfm.getChild(execPos).getText())),
-                      outInfo, inInfo),
+                new scriptDesc(getFixedCmd(stripQuotes(trfm.getChild(execPos).getText())), 
+                    inInfo, outInfo, outRecordReader),
                 new RowSchema(out_rwsch.getColumnInfos()), input), out_rwsch);
 
     return output;
   }
 
+  private Class<? extends RecordReader> getRecordReader(ASTNode node) throws SemanticException {
+    String name;
+
+    if (node.getChildCount() == 0) 
+      name = conf.getVar(HiveConf.ConfVars.HIVESCRIPTRECORDREADER);
+    else 
+      name = unescapeSQLString(node.getChild(0).getText());
+    
+    try {
+      return (Class<? extends RecordReader>)Class.forName(name, true, JavaUtils.getClassLoader());
+    } catch (ClassNotFoundException e) {
+      throw new SemanticException(e);
+    }
+  }
+  
   /**
    * This function is a wrapper of parseInfo.getGroupByForClause which automatically
    * translates SELECT DISTINCT a,b,c to SELECT a,b,c GROUP BY a,b,c.

Modified: hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java?rev=820179&r1=820178&r2=820179&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java (original)
+++ hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java Wed Sep 30 05:17:14 2009
@@ -111,6 +111,13 @@
   public static tableDesc getTableDesc(Class<? extends Deserializer> serdeClass,
                                        String separatorCode, String columns, String columnTypes,
       boolean lastColumnTakesRestOfTheLine) {
+    return getTableDesc(serdeClass, separatorCode, columns, columnTypes, lastColumnTakesRestOfTheLine, false);
+  }
+
+  public static tableDesc getTableDesc(Class<? extends Deserializer> serdeClass,
+                                       String separatorCode, String columns, String columnTypes,
+                                       boolean lastColumnTakesRestOfTheLine, boolean useJSONForLazy) {
+
     Properties properties = Utilities.makeProperties(
       Constants.SERIALIZATION_FORMAT, separatorCode,
       Constants.LIST_COLUMNS, columns);
@@ -122,6 +129,16 @@
           Constants.SERIALIZATION_LAST_COLUMN_TAKES_REST,
           "true");
     }
+
+    // It is not a very clean way, and should be modified later - due to compatibility reasons,
+    // user sees the results as json for custom scripts and has no way for specifying that.
+    // Right now, it is hard-coded in the code
+    if (useJSONForLazy)
+      properties.setProperty(
+          Constants.SERIALIZATION_USE_JSON_OBJECTS,
+          "true");
+      
+      
     return new tableDesc(
       serdeClass,
       TextInputFormat.class,

Modified: hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/plan/scriptDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/plan/scriptDesc.java?rev=820179&r1=820178&r2=820179&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/plan/scriptDesc.java (original)
+++ hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/plan/scriptDesc.java Wed Sep 30 05:17:14 2009
@@ -20,6 +20,8 @@
 
 import java.io.Serializable;
 
+import org.apache.hadoop.hive.ql.exec.RecordReader;
+
 @explain(displayName="Transform Operator")
 public class scriptDesc implements Serializable {
   private static final long serialVersionUID = 1L;
@@ -28,16 +30,19 @@
   private tableDesc scriptOutputInfo;
   // Describe how to serialize data out to user script
   private tableDesc scriptInputInfo;
+  private Class<? extends RecordReader> outRecordReaderClass;
 
   public scriptDesc() { }
   public scriptDesc(
     final String scriptCmd,
+    final tableDesc scriptInputInfo,
     final tableDesc scriptOutputInfo,
-    final tableDesc scriptInputInfo) {
-
+    final Class<? extends RecordReader> outRecordReaderClass) {
+    
     this.scriptCmd = scriptCmd;
-    this.scriptOutputInfo = scriptOutputInfo;
     this.scriptInputInfo = scriptInputInfo;
+    this.scriptOutputInfo = scriptOutputInfo;
+    this.outRecordReaderClass = outRecordReaderClass;
   }
   
   @explain(displayName="command")
@@ -61,4 +66,17 @@
   public void setScriptInputInfo(tableDesc scriptInputInfo) {
     this.scriptInputInfo = scriptInputInfo;
   }
+  /**
+   * @return the outRecordReaderClass
+   */
+  public Class<? extends RecordReader> getOutRecordReaderClass() {
+    return outRecordReaderClass;
+  }
+  /**
+   * @param outRecordReaderClass the outRecordReaderClass to set
+   */
+  public void setOutRecordReaderClass(
+      Class<? extends RecordReader> outRecordReaderClass) {
+    this.outRecordReaderClass = outRecordReaderClass;
+  }
 }

Modified: hadoop/hive/branches/branch-0.4/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java?rev=820179&r1=820178&r2=820179&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java (original)
+++ hadoop/hive/branches/branch-0.4/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java Wed Sep 30 05:17:14 2009
@@ -211,7 +211,8 @@
     Operator<scriptDesc> op2 = OperatorFactory.get
       (new scriptDesc("/bin/cat",
           PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key,value"),
-          PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key,value")),
+          PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key,value"), 
+          TextRecordReader.class),
        op3);
 
 
@@ -350,8 +351,9 @@
 
     Operator<scriptDesc> op0 = OperatorFactory.get
     (new scriptDesc("/bin/cat",
+        PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key,value"),
         PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "tkey,tvalue"),
-        PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key,value")),
+        TextRecordReader.class),
      op1);
 
     Operator<selectDesc> op4 = OperatorFactory.get(new selectDesc(
@@ -431,7 +433,8 @@
     Operator<scriptDesc> op0 = OperatorFactory.get
       (new scriptDesc("\'/bin/cat\'",
           PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "tkey,tvalue"),
-          PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "tkey,tvalue")),
+          PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "tkey,tvalue"),
+          TextRecordReader.class),
        op1);
 
     Operator<selectDesc> op4 = OperatorFactory.get(new selectDesc(

Modified: hadoop/hive/branches/branch-0.4/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java?rev=820179&r1=820178&r2=820179&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java (original)
+++ hadoop/hive/branches/branch-0.4/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java Wed Sep 30 05:17:14 2009
@@ -191,7 +191,7 @@
       // scriptOperator to echo the output of the select
       tableDesc scriptOutput = PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "a,b");
       tableDesc scriptInput  = PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "a,b");
-      scriptDesc sd = new scriptDesc("cat", scriptOutput, scriptInput);
+      scriptDesc sd = new scriptDesc("cat", scriptOutput, scriptInput, TextRecordReader.class);
       Operator<scriptDesc> sop = OperatorFactory.getAndMakeChild(sd, op);
 
       // Collect operator to observe the output of the script

Added: hadoop/hive/branches/branch-0.4/ql/src/test/queries/clientpositive/input38.q
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/test/queries/clientpositive/input38.q?rev=820179&view=auto
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/test/queries/clientpositive/input38.q (added)
+++ hadoop/hive/branches/branch-0.4/ql/src/test/queries/clientpositive/input38.q Wed Sep 30 05:17:14 2009
@@ -0,0 +1,22 @@
+drop table dest1;
+CREATE TABLE dest1(key STRING, value STRING) STORED AS TEXTFILE;
+
+EXPLAIN
+FROM (
+  FROM src
+  SELECT TRANSFORM(src.key, src.value, 1+2, 3+4)
+         USING '/bin/cat'
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tmap.key, tmap.value;
+
+FROM (
+  FROM src
+  SELECT TRANSFORM(src.key, src.value, 1+2, 3+4)
+         USING '/bin/cat'
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tmap.key, tmap.value;
+
+
+SELECT dest1.* FROM dest1;
+
+drop table dest1;

Modified: hadoop/hive/branches/branch-0.4/ql/src/test/results/clientnegative/script_error.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/test/results/clientnegative/script_error.q.out?rev=820179&r1=820178&r2=820179&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/test/results/clientnegative/script_error.q.out (original)
+++ hadoop/hive/branches/branch-0.4/ql/src/test/results/clientnegative/script_error.q.out Wed Sep 30 05:17:14 2009
@@ -2,7 +2,7 @@
 SELECT TRANSFORM(src.key, src.value) USING '../data/scripts/error_script' AS (tkey, tvalue)
 FROM src
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) '../data/scripts/error_script' (TOK_ALIASLIST tkey tvalue))))))
+  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) TOK_SERDE '../data/scripts/error_script' TOK_SERDE TOK_RECORDREADER (TOK_ALIASLIST tkey tvalue))))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -42,5 +42,5 @@
 query: SELECT TRANSFORM(src.key, src.value) USING '../data/scripts/error_script' AS (tkey, tvalue)
 FROM src
 Input: default/src
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2091756058/10000
+Output: file:/data/users/zshao/tools/deploy-branch-0.4-apache-hive/.ptest_3/build/ql/tmp/95558599/10000
 FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask

Modified: hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input14.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input14.q.out?rev=820179&r1=820178&r2=820179&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input14.q.out (original)
+++ hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input14.q.out Wed Sep 30 05:17:14 2009
@@ -8,7 +8,7 @@
 ) tmap
 INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) '/bin/cat' (TOK_ALIASLIST tkey tvalue)))) (TOK_CLUSTERBY (TOK_TABLE_OR_COL tkey)))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tkey)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tvalue))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL tmap) tkey) 100))))
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) TOK_SERDE '/bin/cat' TOK_SERDE TOK_RECORDREADER (TOK_ALIASLIST tkey tvalue)))) (TOK_CLUSTERBY (TOK_TABLE_OR_COL tkey)))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tkey)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tvalue))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL tmap) tkey) 100))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -98,7 +98,7 @@
 Output: default/dest1
 query: SELECT dest1.* FROM dest1
 Input: default/dest1
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1767626690/10000
+Output: file:/data/users/zshao/tools/deploy-branch-0.4-apache-hive/.ptest_0/build/ql/tmp/2113207275/10000
 0	val_0
 0	val_0
 0	val_0

Modified: hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input14_limit.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input14_limit.q.out?rev=820179&r1=820178&r2=820179&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input14_limit.q.out (original)
+++ hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input14_limit.q.out Wed Sep 30 05:17:14 2009
@@ -8,7 +8,7 @@
 ) tmap
 INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) '/bin/cat' (TOK_ALIASLIST tkey tvalue)))) (TOK_CLUSTERBY (TOK_TABLE_OR_COL tkey)) (TOK_LIMIT 20))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tkey)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tvalue))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL tmap) tkey) 100))))
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) TOK_SERDE '/bin/cat' TOK_SERDE TOK_RECORDREADER (TOK_ALIASLIST tkey tvalue)))) (TOK_CLUSTERBY (TOK_TABLE_OR_COL tkey)) (TOK_LIMIT 20))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tkey)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tvalue))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL tmap) tkey) 100))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -61,7 +61,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/34766397/10002 
+        file:/data/users/zshao/tools/deploy-branch-0.4-apache-hive/.ptest_2/build/ql/tmp/1808475238/10002 
             Reduce Output Operator
               key expressions:
                     expr: _col0
@@ -128,7 +128,7 @@
 Output: default/dest1
 query: SELECT dest1.* FROM dest1
 Input: default/dest1
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/452755876/10000
+Output: file:/data/users/zshao/tools/deploy-branch-0.4-apache-hive/.ptest_2/build/ql/tmp/323540673/10000
 0	val_0
 0	val_0
 0	val_0

Modified: hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input17.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input17.q.out?rev=820179&r1=820178&r2=820179&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input17.q.out (original)
+++ hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input17.q.out Wed Sep 30 05:17:14 2009
@@ -8,7 +8,7 @@
 ) tmap
 INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (+ (. (TOK_TABLE_OR_COL src_thrift) aint) ([ (. (TOK_TABLE_OR_COL src_thrift) lint) 0)) ([ (. (TOK_TABLE_OR_COL src_thrift) lintstring) 0)) '/bin/cat' (TOK_ALIASLIST tkey tvalue)))) (TOK_CLUSTERBY (TOK_TABLE_OR_COL tkey)))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tkey)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tvalue)))))
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (+ (. (TOK_TABLE_OR_COL src_thrift) aint) ([ (. (TOK_TABLE_OR_COL src_thrift) lint) 0)) ([ (. (TOK_TABLE_OR_COL src_thrift) lintstring) 0)) TOK_SERDE '/bin/cat' TOK_SERDE TOK_RECORDREADER (TOK_ALIASLIST tkey tvalue)))) (TOK_CLUSTERBY (TOK_TABLE_OR_COL tkey)))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tkey)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tvalue)))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -94,7 +94,7 @@
 Output: default/dest1
 query: SELECT dest1.* FROM dest1
 Input: default/dest1
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2071608019/10000
+Output: file:/data/users/zshao/tools/deploy-branch-0.4-apache-hive/.ptest_1/build/ql/tmp/23609873/10000
 NULL	null
 -1461153966	{"myint":49,"mystring":"343","underscore_int":7}
 -1952710705	{"myint":25,"mystring":"125","underscore_int":5}

Modified: hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input18.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input18.q.out?rev=820179&r1=820178&r2=820179&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input18.q.out (original)
+++ hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input18.q.out Wed Sep 30 05:17:14 2009
@@ -8,7 +8,7 @@
 ) tmap
 INSERT OVERWRITE TABLE dest1 SELECT tmap.key, regexp_replace(tmap.value,'\t','+') WHERE tmap.key < 100
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value) (+ 1 2) (+ 3 4)) '/bin/cat'))) (TOK_CLUSTERBY (TOK_TABLE_OR_COL key)))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) key)) (TOK_SELEXPR (TOK_FUNCTION regexp_replace (. (TOK_TABLE_OR_COL tmap) value) '\t' '+'))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL tmap) key) 100))))
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value) (+ 1 2) (+ 3 4)) TOK_SERDE '/bin/cat' TOK_SERDE TOK_RECORDREADER))) (TOK_CLUSTERBY (TOK_TABLE_OR_COL key)))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) key)) (TOK_SELEXPR (TOK_FUNCTION regexp_replace (. (TOK_TABLE_OR_COL tmap) value) '\t' '+'))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL tmap) key) 100))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -102,7 +102,7 @@
 Output: default/dest1
 query: SELECT dest1.* FROM dest1
 Input: default/dest1
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1817619374/10000
+Output: file:/data/users/zshao/tools/deploy-branch-0.4-apache-hive/.ptest_2/build/ql/tmp/1347654745/10000
 0	val_0+3+7
 0	val_0+3+7
 0	val_0+3+7

Modified: hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input20.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input20.q.out?rev=820179&r1=820178&r2=820179&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input20.q.out (original)
+++ hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input20.q.out Wed Sep 30 05:17:14 2009
@@ -12,7 +12,7 @@
 USING '../data/scripts/input20_script'
 AS key, value
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) key)) 'cat'))) (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL key)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value))))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL tmap) key) (. (TOK_TABLE_OR_COL tmap) value)) '../data/scripts/input20_script' (TOK_ALIASLIST key value))))))
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) key)) TOK_SERDE 'cat' TOK_SERDE TOK_RECORDREADER))) (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL key)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value))))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL tmap) key) (. (TOK_TABLE_OR_COL tmap) value)) TOK_SERDE '../data/scripts/input20_script' TOK_SERDE TOK_RECORDREADER (TOK_ALIASLIST key value))))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -109,7 +109,7 @@
 Output: default/dest1
 query: SELECT * FROM dest1 SORT BY key, value
 Input: default/dest1
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/660141896/10000
+Output: file:/data/users/zshao/tools/deploy-branch-0.4-apache-hive/.ptest_1/build/ql/tmp/1527328237/10000
 1	105_105
 1	10_10
 1	111_111

Modified: hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input33.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input33.q.out?rev=820179&r1=820178&r2=820179&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input33.q.out (original)
+++ hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input33.q.out Wed Sep 30 05:17:14 2009
@@ -12,7 +12,7 @@
 USING '../data/scripts/input20_script'
 AS (key STRING, value STRING)
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) key)) 'cat'))) (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL key)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value))))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL tmap) key) (. (TOK_TABLE_OR_COL tmap) value)) '../data/scripts/input20_script' (TOK_TABCOLLIST (TOK_TABCOL key TOK_STRING) (TOK_TABCOL value TOK_STRING)))))))
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) key)) TOK_SERDE 'cat' TOK_SERDE TOK_RECORDREADER))) (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL key)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value))))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL tmap) key) (. (TOK_TABLE_OR_COL tmap) value)) TOK_SERDE '../data/scripts/input20_script' TOK_SERDE TOK_RECORDREADER (TOK_TABCOLLIST (TOK_TABCOL key TOK_STRING) (TOK_TABCOL value TOK_STRING)))))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -109,7 +109,7 @@
 Output: default/dest1
 query: SELECT * FROM dest1 SORT BY key, value
 Input: default/dest1
-Output: file:/data/users/njain/hive3/hive3/build/ql/tmp/410502456/10000
+Output: file:/data/users/zshao/tools/deploy-branch-0.4-apache-hive/.ptest_2/build/ql/tmp/2056666212/10000
 1	105_105
 1	10_10
 1	111_111

Modified: hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input34.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input34.q.out?rev=820179&r1=820178&r2=820179&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input34.q.out (original)
+++ hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input34.q.out Wed Sep 30 05:17:14 2009
@@ -8,7 +8,7 @@
 ) tmap
 INSERT OVERWRITE TABLE dest1 SELECT tkey, tvalue
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) (TOK_SERDE (TOK_SERDENAME 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe')) '/bin/cat' (TOK_ALIASLIST tkey tvalue) (TOK_SERDE (TOK_SERDENAME 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'))))))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL tkey)) (TOK_SELEXPR (TOK_TABLE_OR_COL tvalue)))))
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) (TOK_SERDE (TOK_SERDENAME 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe')) '/bin/cat' (TOK_SERDE (TOK_SERDENAME 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe')) TOK_RECORDREADER (TOK_ALIASLIST tkey tvalue)))))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL tkey)) (TOK_SELEXPR (TOK_TABLE_OR_COL tvalue)))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -63,10 +63,10 @@
           Move Operator
             files:
                 hdfs directory: true
-                destination: file:/data/users/njain/hive3/hive3/build/ql/tmp/1093532122/10000
+                destination: file:/data/users/zshao/tools/deploy-branch-0.4-apache-hive/.ptest_0/build/ql/tmp/1915467964/10000
           Map Reduce
             Alias -> Map Operator Tree:
-              file:/data/users/njain/hive3/hive3/build/ql/tmp/754709366/10002 
+              file:/data/users/zshao/tools/deploy-branch-0.4-apache-hive/.ptest_0/build/ql/tmp/1344294449/10002 
                   Reduce Output Operator
                     sort order: 
                     Map-reduce partition columns:
@@ -111,7 +111,7 @@
 Output: default/dest1
 query: SELECT dest1.* FROM dest1
 Input: default/dest1
-Output: file:/data/users/njain/hive3/hive3/build/ql/tmp/248092577/10000
+Output: file:/data/users/zshao/tools/deploy-branch-0.4-apache-hive/.ptest_0/build/ql/tmp/1315708841/10000
 238	val_238
 86	val_86
 311	val_311

Modified: hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input35.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input35.q.out?rev=820179&r1=820178&r2=820179&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input35.q.out (original)
+++ hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input35.q.out Wed Sep 30 05:17:14 2009
@@ -8,7 +8,7 @@
 ) tmap
 INSERT OVERWRITE TABLE dest1 SELECT tkey, tvalue
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) (TOK_SERDE (TOK_SERDEPROPS (TOK_TABLEROWFORMATFIELD '\002'))) '/bin/cat' (TOK_ALIASLIST tkey tvalue) (TOK_SERDE (TOK_SERDEPROPS (TOK_TABLEROWFORMATFIELD '\002')))))))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL tkey)) (TOK_SELEXPR (TOK_TABLE_OR_COL tvalue)))))
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) (TOK_SERDE (TOK_SERDEPROPS (TOK_TABLEROWFORMATFIELD '\002'))) '/bin/cat' (TOK_SERDE (TOK_SERDEPROPS (TOK_TABLEROWFORMATFIELD '\002'))) TOK_RECORDREADER (TOK_ALIASLIST tkey tvalue)))))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL tkey)) (TOK_SELEXPR (TOK_TABLE_OR_COL tvalue)))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -63,10 +63,10 @@
           Move Operator
             files:
                 hdfs directory: true
-                destination: file:/data/users/njain/hive3/hive3/build/ql/tmp/525283863/10000
+                destination: file:/data/users/zshao/tools/deploy-branch-0.4-apache-hive/.ptest_1/build/ql/tmp/435422102/10000
           Map Reduce
             Alias -> Map Operator Tree:
-              file:/data/users/njain/hive3/hive3/build/ql/tmp/84195792/10002 
+              file:/data/users/zshao/tools/deploy-branch-0.4-apache-hive/.ptest_1/build/ql/tmp/2143228595/10002 
                   Reduce Output Operator
                     sort order: 
                     Map-reduce partition columns:
@@ -111,7 +111,7 @@
 Output: default/dest1
 query: SELECT dest1.* FROM dest1
 Input: default/dest1
-Output: file:/data/users/njain/hive3/hive3/build/ql/tmp/1714970451/10000
+Output: file:/data/users/zshao/tools/deploy-branch-0.4-apache-hive/.ptest_1/build/ql/tmp/1957197435/10000
 238	val_238
 86	val_86
 311	val_311

Modified: hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input36.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input36.q.out?rev=820179&r1=820178&r2=820179&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input36.q.out (original)
+++ hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input36.q.out Wed Sep 30 05:17:14 2009
@@ -8,7 +8,7 @@
 ) tmap
 INSERT OVERWRITE TABLE dest1 SELECT tkey, tvalue
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) (TOK_SERDE (TOK_SERDEPROPS (TOK_TABLEROWFORMATFIELD '\002'))) '/bin/cat' (TOK_ALIASLIST tkey tvalue) (TOK_SERDE (TOK_SERDEPROPS (TOK_TABLEROWFORMATFIELD '\003')))))))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL tkey)) (TOK_SELEXPR (TOK_TABLE_OR_COL tvalue)))))
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) (TOK_SERDE (TOK_SERDEPROPS (TOK_TABLEROWFORMATFIELD '\002'))) '/bin/cat' (TOK_SERDE (TOK_SERDEPROPS (TOK_TABLEROWFORMATFIELD '\003'))) TOK_RECORDREADER (TOK_ALIASLIST tkey tvalue)))))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL tkey)) (TOK_SELEXPR (TOK_TABLE_OR_COL tvalue)))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -63,10 +63,10 @@
           Move Operator
             files:
                 hdfs directory: true
-                destination: file:/data/users/njain/hive3/hive3/build/ql/tmp/1966676963/10000
+                destination: file:/data/users/zshao/tools/deploy-branch-0.4-apache-hive/.ptest_2/build/ql/tmp/1806933328/10000
           Map Reduce
             Alias -> Map Operator Tree:
-              file:/data/users/njain/hive3/hive3/build/ql/tmp/1028977168/10002 
+              file:/data/users/zshao/tools/deploy-branch-0.4-apache-hive/.ptest_2/build/ql/tmp/1468092537/10002 
                   Reduce Output Operator
                     sort order: 
                     Map-reduce partition columns:
@@ -111,7 +111,7 @@
 Output: default/dest1
 query: SELECT dest1.* FROM dest1
 Input: default/dest1
-Output: file:/data/users/njain/hive3/hive3/build/ql/tmp/1073316238/10000
+Output: file:/data/users/zshao/tools/deploy-branch-0.4-apache-hive/.ptest_2/build/ql/tmp/2130499517/10000
 NULL	NULL
 NULL	NULL
 NULL	NULL

Added: hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input38.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input38.q.out?rev=820179&view=auto
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input38.q.out (added)
+++ hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input38.q.out Wed Sep 30 05:17:14 2009
@@ -0,0 +1,611 @@
+query: drop table dest1
+query: CREATE TABLE dest1(key STRING, value STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM (
+  FROM src
+  SELECT TRANSFORM(src.key, src.value, 1+2, 3+4)
+         USING '/bin/cat'
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tmap.key, tmap.value
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value) (+ 1 2) (+ 3 4)) TOK_SERDE '/bin/cat' TOK_SERDE TOK_RECORDREADER))))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) value)))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-4 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-4
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        tmap:src 
+          TableScan
+            alias: src
+            Select Operator
+              expressions:
+                    expr: key
+                    type: string
+                    expr: value
+                    type: string
+                    expr: (1 + 2)
+                    type: int
+                    expr: (3 + 4)
+                    type: int
+              outputColumnNames: _col0, _col1, _col2, _col3
+              Transform Operator
+                command: /bin/cat
+                output info:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                Select Operator
+                  expressions:
+                        expr: key
+                        type: string
+                        expr: value
+                        type: string
+                  outputColumnNames: _col0, _col1
+                  File Output Operator
+                    compressed: false
+                    GlobalTableId: 1
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: dest1
+
+  Stage: Stage-4
+    Conditional Operator
+      list of dependent Tasks:
+          Move Operator
+            files:
+                hdfs directory: true
+                destination: file:/data/users/zshao/tools/deploy-branch-0.4-apache-hive/build/ql/tmp/599296378/10000
+          Map Reduce
+            Alias -> Map Operator Tree:
+              file:/data/users/zshao/tools/deploy-branch-0.4-apache-hive/build/ql/tmp/1510286313/10002 
+                  Reduce Output Operator
+                    sort order: 
+                    Map-reduce partition columns:
+                          expr: rand()
+                          type: double
+                    tag: -1
+                    value expressions:
+                          expr: key
+                          type: string
+                          expr: value
+                          type: string
+            Reduce Operator Tree:
+              Extract
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 0
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      name: dest1
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: dest1
+
+
+query: FROM (
+  FROM src
+  SELECT TRANSFORM(src.key, src.value, 1+2, 3+4)
+         USING '/bin/cat'
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tmap.key, tmap.value
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: file:/data/users/zshao/tools/deploy-branch-0.4-apache-hive/build/ql/tmp/2059459025/10000
+238	val_238	3	7
+86	val_86	3	7
+311	val_311	3	7
+27	val_27	3	7
+165	val_165	3	7
+409	val_409	3	7
+255	val_255	3	7
+278	val_278	3	7
+98	val_98	3	7
+484	val_484	3	7
+265	val_265	3	7
+193	val_193	3	7
+401	val_401	3	7
+150	val_150	3	7
+273	val_273	3	7
+224	val_224	3	7
+369	val_369	3	7
+66	val_66	3	7
+128	val_128	3	7
+213	val_213	3	7
+146	val_146	3	7
+406	val_406	3	7
+429	val_429	3	7
+374	val_374	3	7
+152	val_152	3	7
+469	val_469	3	7
+145	val_145	3	7
+495	val_495	3	7
+37	val_37	3	7
+327	val_327	3	7
+281	val_281	3	7
+277	val_277	3	7
+209	val_209	3	7
+15	val_15	3	7
+82	val_82	3	7
+403	val_403	3	7
+166	val_166	3	7
+417	val_417	3	7
+430	val_430	3	7
+252	val_252	3	7
+292	val_292	3	7
+219	val_219	3	7
+287	val_287	3	7
+153	val_153	3	7
+193	val_193	3	7
+338	val_338	3	7
+446	val_446	3	7
+459	val_459	3	7
+394	val_394	3	7
+237	val_237	3	7
+482	val_482	3	7
+174	val_174	3	7
+413	val_413	3	7
+494	val_494	3	7
+207	val_207	3	7
+199	val_199	3	7
+466	val_466	3	7
+208	val_208	3	7
+174	val_174	3	7
+399	val_399	3	7
+396	val_396	3	7
+247	val_247	3	7
+417	val_417	3	7
+489	val_489	3	7
+162	val_162	3	7
+377	val_377	3	7
+397	val_397	3	7
+309	val_309	3	7
+365	val_365	3	7
+266	val_266	3	7
+439	val_439	3	7
+342	val_342	3	7
+367	val_367	3	7
+325	val_325	3	7
+167	val_167	3	7
+195	val_195	3	7
+475	val_475	3	7
+17	val_17	3	7
+113	val_113	3	7
+155	val_155	3	7
+203	val_203	3	7
+339	val_339	3	7
+0	val_0	3	7
+455	val_455	3	7
+128	val_128	3	7
+311	val_311	3	7
+316	val_316	3	7
+57	val_57	3	7
+302	val_302	3	7
+205	val_205	3	7
+149	val_149	3	7
+438	val_438	3	7
+345	val_345	3	7
+129	val_129	3	7
+170	val_170	3	7
+20	val_20	3	7
+489	val_489	3	7
+157	val_157	3	7
+378	val_378	3	7
+221	val_221	3	7
+92	val_92	3	7
+111	val_111	3	7
+47	val_47	3	7
+72	val_72	3	7
+4	val_4	3	7
+280	val_280	3	7
+35	val_35	3	7
+427	val_427	3	7
+277	val_277	3	7
+208	val_208	3	7
+356	val_356	3	7
+399	val_399	3	7
+169	val_169	3	7
+382	val_382	3	7
+498	val_498	3	7
+125	val_125	3	7
+386	val_386	3	7
+437	val_437	3	7
+469	val_469	3	7
+192	val_192	3	7
+286	val_286	3	7
+187	val_187	3	7
+176	val_176	3	7
+54	val_54	3	7
+459	val_459	3	7
+51	val_51	3	7
+138	val_138	3	7
+103	val_103	3	7
+239	val_239	3	7
+213	val_213	3	7
+216	val_216	3	7
+430	val_430	3	7
+278	val_278	3	7
+176	val_176	3	7
+289	val_289	3	7
+221	val_221	3	7
+65	val_65	3	7
+318	val_318	3	7
+332	val_332	3	7
+311	val_311	3	7
+275	val_275	3	7
+137	val_137	3	7
+241	val_241	3	7
+83	val_83	3	7
+333	val_333	3	7
+180	val_180	3	7
+284	val_284	3	7
+12	val_12	3	7
+230	val_230	3	7
+181	val_181	3	7
+67	val_67	3	7
+260	val_260	3	7
+404	val_404	3	7
+384	val_384	3	7
+489	val_489	3	7
+353	val_353	3	7
+373	val_373	3	7
+272	val_272	3	7
+138	val_138	3	7
+217	val_217	3	7
+84	val_84	3	7
+348	val_348	3	7
+466	val_466	3	7
+58	val_58	3	7
+8	val_8	3	7
+411	val_411	3	7
+230	val_230	3	7
+208	val_208	3	7
+348	val_348	3	7
+24	val_24	3	7
+463	val_463	3	7
+431	val_431	3	7
+179	val_179	3	7
+172	val_172	3	7
+42	val_42	3	7
+129	val_129	3	7
+158	val_158	3	7
+119	val_119	3	7
+496	val_496	3	7
+0	val_0	3	7
+322	val_322	3	7
+197	val_197	3	7
+468	val_468	3	7
+393	val_393	3	7
+454	val_454	3	7
+100	val_100	3	7
+298	val_298	3	7
+199	val_199	3	7
+191	val_191	3	7
+418	val_418	3	7
+96	val_96	3	7
+26	val_26	3	7
+165	val_165	3	7
+327	val_327	3	7
+230	val_230	3	7
+205	val_205	3	7
+120	val_120	3	7
+131	val_131	3	7
+51	val_51	3	7
+404	val_404	3	7
+43	val_43	3	7
+436	val_436	3	7
+156	val_156	3	7
+469	val_469	3	7
+468	val_468	3	7
+308	val_308	3	7
+95	val_95	3	7
+196	val_196	3	7
+288	val_288	3	7
+481	val_481	3	7
+457	val_457	3	7
+98	val_98	3	7
+282	val_282	3	7
+197	val_197	3	7
+187	val_187	3	7
+318	val_318	3	7
+318	val_318	3	7
+409	val_409	3	7
+470	val_470	3	7
+137	val_137	3	7
+369	val_369	3	7
+316	val_316	3	7
+169	val_169	3	7
+413	val_413	3	7
+85	val_85	3	7
+77	val_77	3	7
+0	val_0	3	7
+490	val_490	3	7
+87	val_87	3	7
+364	val_364	3	7
+179	val_179	3	7
+118	val_118	3	7
+134	val_134	3	7
+395	val_395	3	7
+282	val_282	3	7
+138	val_138	3	7
+238	val_238	3	7
+419	val_419	3	7
+15	val_15	3	7
+118	val_118	3	7
+72	val_72	3	7
+90	val_90	3	7
+307	val_307	3	7
+19	val_19	3	7
+435	val_435	3	7
+10	val_10	3	7
+277	val_277	3	7
+273	val_273	3	7
+306	val_306	3	7
+224	val_224	3	7
+309	val_309	3	7
+389	val_389	3	7
+327	val_327	3	7
+242	val_242	3	7
+369	val_369	3	7
+392	val_392	3	7
+272	val_272	3	7
+331	val_331	3	7
+401	val_401	3	7
+242	val_242	3	7
+452	val_452	3	7
+177	val_177	3	7
+226	val_226	3	7
+5	val_5	3	7
+497	val_497	3	7
+402	val_402	3	7
+396	val_396	3	7
+317	val_317	3	7
+395	val_395	3	7
+58	val_58	3	7
+35	val_35	3	7
+336	val_336	3	7
+95	val_95	3	7
+11	val_11	3	7
+168	val_168	3	7
+34	val_34	3	7
+229	val_229	3	7
+233	val_233	3	7
+143	val_143	3	7
+472	val_472	3	7
+322	val_322	3	7
+498	val_498	3	7
+160	val_160	3	7
+195	val_195	3	7
+42	val_42	3	7
+321	val_321	3	7
+430	val_430	3	7
+119	val_119	3	7
+489	val_489	3	7
+458	val_458	3	7
+78	val_78	3	7
+76	val_76	3	7
+41	val_41	3	7
+223	val_223	3	7
+492	val_492	3	7
+149	val_149	3	7
+449	val_449	3	7
+218	val_218	3	7
+228	val_228	3	7
+138	val_138	3	7
+453	val_453	3	7
+30	val_30	3	7
+209	val_209	3	7
+64	val_64	3	7
+468	val_468	3	7
+76	val_76	3	7
+74	val_74	3	7
+342	val_342	3	7
+69	val_69	3	7
+230	val_230	3	7
+33	val_33	3	7
+368	val_368	3	7
+103	val_103	3	7
+296	val_296	3	7
+113	val_113	3	7
+216	val_216	3	7
+367	val_367	3	7
+344	val_344	3	7
+167	val_167	3	7
+274	val_274	3	7
+219	val_219	3	7
+239	val_239	3	7
+485	val_485	3	7
+116	val_116	3	7
+223	val_223	3	7
+256	val_256	3	7
+263	val_263	3	7
+70	val_70	3	7
+487	val_487	3	7
+480	val_480	3	7
+401	val_401	3	7
+288	val_288	3	7
+191	val_191	3	7
+5	val_5	3	7
+244	val_244	3	7
+438	val_438	3	7
+128	val_128	3	7
+467	val_467	3	7
+432	val_432	3	7
+202	val_202	3	7
+316	val_316	3	7
+229	val_229	3	7
+469	val_469	3	7
+463	val_463	3	7
+280	val_280	3	7
+2	val_2	3	7
+35	val_35	3	7
+283	val_283	3	7
+331	val_331	3	7
+235	val_235	3	7
+80	val_80	3	7
+44	val_44	3	7
+193	val_193	3	7
+321	val_321	3	7
+335	val_335	3	7
+104	val_104	3	7
+466	val_466	3	7
+366	val_366	3	7
+175	val_175	3	7
+403	val_403	3	7
+483	val_483	3	7
+53	val_53	3	7
+105	val_105	3	7
+257	val_257	3	7
+406	val_406	3	7
+409	val_409	3	7
+190	val_190	3	7
+406	val_406	3	7
+401	val_401	3	7
+114	val_114	3	7
+258	val_258	3	7
+90	val_90	3	7
+203	val_203	3	7
+262	val_262	3	7
+348	val_348	3	7
+424	val_424	3	7
+12	val_12	3	7
+396	val_396	3	7
+201	val_201	3	7
+217	val_217	3	7
+164	val_164	3	7
+431	val_431	3	7
+454	val_454	3	7
+478	val_478	3	7
+298	val_298	3	7
+125	val_125	3	7
+431	val_431	3	7
+164	val_164	3	7
+424	val_424	3	7
+187	val_187	3	7
+382	val_382	3	7
+5	val_5	3	7
+70	val_70	3	7
+397	val_397	3	7
+480	val_480	3	7
+291	val_291	3	7
+24	val_24	3	7
+351	val_351	3	7
+255	val_255	3	7
+104	val_104	3	7
+70	val_70	3	7
+163	val_163	3	7
+438	val_438	3	7
+119	val_119	3	7
+414	val_414	3	7
+200	val_200	3	7
+491	val_491	3	7
+237	val_237	3	7
+439	val_439	3	7
+360	val_360	3	7
+248	val_248	3	7
+479	val_479	3	7
+305	val_305	3	7
+417	val_417	3	7
+199	val_199	3	7
+444	val_444	3	7
+120	val_120	3	7
+429	val_429	3	7
+169	val_169	3	7
+443	val_443	3	7
+323	val_323	3	7
+325	val_325	3	7
+277	val_277	3	7
+230	val_230	3	7
+478	val_478	3	7
+178	val_178	3	7
+468	val_468	3	7
+310	val_310	3	7
+317	val_317	3	7
+333	val_333	3	7
+493	val_493	3	7
+460	val_460	3	7
+207	val_207	3	7
+249	val_249	3	7
+265	val_265	3	7
+480	val_480	3	7
+83	val_83	3	7
+136	val_136	3	7
+353	val_353	3	7
+172	val_172	3	7
+214	val_214	3	7
+462	val_462	3	7
+233	val_233	3	7
+406	val_406	3	7
+133	val_133	3	7
+175	val_175	3	7
+189	val_189	3	7
+454	val_454	3	7
+375	val_375	3	7
+401	val_401	3	7
+421	val_421	3	7
+407	val_407	3	7
+384	val_384	3	7
+256	val_256	3	7
+26	val_26	3	7
+134	val_134	3	7
+67	val_67	3	7
+384	val_384	3	7
+379	val_379	3	7
+18	val_18	3	7
+462	val_462	3	7
+492	val_492	3	7
+100	val_100	3	7
+298	val_298	3	7
+9	val_9	3	7
+341	val_341	3	7
+498	val_498	3	7
+146	val_146	3	7
+458	val_458	3	7
+362	val_362	3	7
+186	val_186	3	7
+285	val_285	3	7
+348	val_348	3	7
+167	val_167	3	7
+18	val_18	3	7
+273	val_273	3	7
+183	val_183	3	7
+281	val_281	3	7
+344	val_344	3	7
+97	val_97	3	7
+469	val_469	3	7
+315	val_315	3	7
+84	val_84	3	7
+28	val_28	3	7
+37	val_37	3	7
+448	val_448	3	7
+152	val_152	3	7
+348	val_348	3	7
+307	val_307	3	7
+194	val_194	3	7
+414	val_414	3	7
+477	val_477	3	7
+222	val_222	3	7
+126	val_126	3	7
+90	val_90	3	7
+169	val_169	3	7
+403	val_403	3	7
+400	val_400	3	7
+200	val_200	3	7
+97	val_97	3	7
+query: drop table dest1

Modified: hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input5.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input5.q.out?rev=820179&r1=820178&r2=820179&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input5.q.out (original)
+++ hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input5.q.out Wed Sep 30 05:17:14 2009
@@ -8,7 +8,7 @@
 ) tmap
 INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src_thrift) lint) (. (TOK_TABLE_OR_COL src_thrift) lintstring)) '/bin/cat' (TOK_ALIASLIST tkey tvalue)))) (TOK_CLUSTERBY (TOK_TABLE_OR_COL tkey)))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tkey)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tvalue)))))
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src_thrift) lint) (. (TOK_TABLE_OR_COL src_thrift) lintstring)) TOK_SERDE '/bin/cat' TOK_SERDE TOK_RECORDREADER (TOK_ALIASLIST tkey tvalue)))) (TOK_CLUSTERBY (TOK_TABLE_OR_COL tkey)))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tkey)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tvalue)))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -87,7 +87,7 @@
 Output: default/dest1
 query: SELECT dest1.* FROM dest1
 Input: default/dest1
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/120418612/10000
+Output: file:/data/users/zshao/tools/deploy-branch-0.4-apache-hive/.ptest_1/build/ql/tmp/904916919/10000
 [0,0,0]	[{"myint":0,"mystring":"0","underscore_int":0}]
 [1,2,3]	[{"myint":1,"mystring":"1","underscore_int":1}]
 [2,4,6]	[{"myint":4,"mystring":"8","underscore_int":2}]

Modified: hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/mapreduce1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/mapreduce1.q.out?rev=820179&r1=820178&r2=820179&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/mapreduce1.q.out (original)
+++ hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/mapreduce1.q.out Wed Sep 30 05:17:14 2009
@@ -7,7 +7,7 @@
 DISTRIBUTE BY tvalue, tkey
 SORT BY ten, one
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (TOK_FUNCTION TOK_INT (/ (. (TOK_TABLE_OR_COL src) key) 10)) (TOK_FUNCTION TOK_INT (% (. (TOK_TABLE_OR_COL src) key) 10)) (. (TOK_TABLE_OR_COL src) value)) '/bin/cat' (TOK_ALIASLIST tkey ten one tvalue)))) (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL tvalue) (TOK_TABLE_OR_COL tkey)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL ten)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL one)))))
+  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (TOK_FUNCTION TOK_INT (/ (. (TOK_TABLE_OR_COL src) key) 10)) (TOK_FUNCTION TOK_INT (% (. (TOK_TABLE_OR_COL src) key) 10)) (. (TOK_TABLE_OR_COL src) value)) TOK_SERDE '/bin/cat' TOK_SERDE TOK_RECORDREADER (TOK_ALIASLIST tkey ten one tvalue)))) (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL tvalue) (TOK_TABLE_OR_COL tkey)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL ten)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL one)))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -101,7 +101,7 @@
 Output: default/dest1
 query: SELECT dest1.* FROM dest1
 Input: default/dest1
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1222794059/10000
+Output: file:/data/users/zshao/tools/deploy-branch-0.4-apache-hive/.ptest_0/build/ql/tmp/1192171972/10000
 0	0	0	val_0
 0	0	0	val_0
 0	0	0	val_0

Modified: hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/mapreduce2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/mapreduce2.q.out?rev=820179&r1=820178&r2=820179&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/mapreduce2.q.out (original)
+++ hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/mapreduce2.q.out Wed Sep 30 05:17:14 2009
@@ -6,7 +6,7 @@
 USING '/bin/cat' AS (tkey, ten, one, tvalue)
 DISTRIBUTE BY tvalue, tkey
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (TOK_FUNCTION TOK_INT (/ (. (TOK_TABLE_OR_COL src) key) 10)) (TOK_FUNCTION TOK_INT (% (. (TOK_TABLE_OR_COL src) key) 10)) (. (TOK_TABLE_OR_COL src) value)) '/bin/cat' (TOK_ALIASLIST tkey ten one tvalue)))) (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL tvalue) (TOK_TABLE_OR_COL tkey))))
+  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (TOK_FUNCTION TOK_INT (/ (. (TOK_TABLE_OR_COL src) key) 10)) (TOK_FUNCTION TOK_INT (% (. (TOK_TABLE_OR_COL src) key) 10)) (. (TOK_TABLE_OR_COL src) value)) TOK_SERDE '/bin/cat' TOK_SERDE TOK_RECORDREADER (TOK_ALIASLIST tkey ten one tvalue)))) (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL tvalue) (TOK_TABLE_OR_COL tkey))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -94,7 +94,7 @@
 Output: default/dest1
 query: SELECT * FROM (SELECT dest1.* FROM dest1 DISTRIBUTE BY key SORT BY key, ten, one, value) T
 Input: default/dest1
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1799546978/10000
+Output: file:/data/users/zshao/tools/deploy-branch-0.4-apache-hive/.ptest_1/build/ql/tmp/218629990/10000
 0	0	0	val_0
 0	0	0	val_0
 0	0	0	val_0

Modified: hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/mapreduce3.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/mapreduce3.q.out?rev=820179&r1=820178&r2=820179&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/mapreduce3.q.out (original)
+++ hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/mapreduce3.q.out Wed Sep 30 05:17:14 2009
@@ -6,7 +6,7 @@
 USING '/bin/cat' AS (tkey, ten, one, tvalue)
 SORT BY tvalue, tkey
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (TOK_FUNCTION TOK_INT (/ (. (TOK_TABLE_OR_COL src) key) 10)) (TOK_FUNCTION TOK_INT (% (. (TOK_TABLE_OR_COL src) key) 10)) (. (TOK_TABLE_OR_COL src) value)) '/bin/cat' (TOK_ALIASLIST tkey ten one tvalue)))) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL tvalue)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL tkey)))))
+  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (TOK_FUNCTION TOK_INT (/ (. (TOK_TABLE_OR_COL src) key) 10)) (TOK_FUNCTION TOK_INT (% (. (TOK_TABLE_OR_COL src) key) 10)) (. (TOK_TABLE_OR_COL src) value)) TOK_SERDE '/bin/cat' TOK_SERDE TOK_RECORDREADER (TOK_ALIASLIST tkey ten one tvalue)))) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL tvalue)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL tkey)))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -94,7 +94,7 @@
 Output: default/dest1
 query: SELECT dest1.* FROM dest1
 Input: default/dest1
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1654922670/10000
+Output: file:/data/users/zshao/tools/deploy-branch-0.4-apache-hive/.ptest_2/build/ql/tmp/1860104885/10000
 0	0	0	val_0
 0	0	0	val_0
 0	0	0	val_0

Modified: hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/mapreduce4.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/mapreduce4.q.out?rev=820179&r1=820178&r2=820179&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/mapreduce4.q.out (original)
+++ hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/mapreduce4.q.out Wed Sep 30 05:17:14 2009
@@ -7,7 +7,7 @@
 DISTRIBUTE BY tvalue, tkey
 SORT BY ten DESC, one ASC
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (TOK_FUNCTION TOK_INT (/ (. (TOK_TABLE_OR_COL src) key) 10)) (TOK_FUNCTION TOK_INT (% (. (TOK_TABLE_OR_COL src) key) 10)) (. (TOK_TABLE_OR_COL src) value)) '/bin/cat' (TOK_ALIASLIST tkey ten one tvalue)))) (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL tvalue) (TOK_TABLE_OR_COL tkey)) (TOK_SORTBY (TOK_TABSORTCOLNAMEDESC (TOK_TABLE_OR_COL ten)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL one)))))
+  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (TOK_FUNCTION TOK_INT (/ (. (TOK_TABLE_OR_COL src) key) 10)) (TOK_FUNCTION TOK_INT (% (. (TOK_TABLE_OR_COL src) key) 10)) (. (TOK_TABLE_OR_COL src) value)) TOK_SERDE '/bin/cat' TOK_SERDE TOK_RECORDREADER (TOK_ALIASLIST tkey ten one tvalue)))) (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL tvalue) (TOK_TABLE_OR_COL tkey)) (TOK_SORTBY (TOK_TABSORTCOLNAMEDESC (TOK_TABLE_OR_COL ten)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL one)))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -101,7 +101,7 @@
 Output: default/dest1
 query: SELECT dest1.* FROM dest1
 Input: default/dest1
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/354616090/10000
+Output: file:/data/users/zshao/tools/deploy-branch-0.4-apache-hive/.ptest_0/build/ql/tmp/1848020289/10000
 90	9	0	val_90
 90	9	0	val_90
 90	9	0	val_90