You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by at...@apache.org on 2009/01/21 01:57:50 UTC

svn commit: r736181 [1/2] - in /hadoop/hive/trunk: ./ ant/src/org/apache/hadoop/hive/ant/ metastore/src/java/org/apache/hadoop/hive/metastore/ ql/src/java/org/apache/hadoop/hive/ql/parse/ ql/src/test/queries/clientnegative/ ql/src/test/queries/clientpo...

Author: athusoo
Date: Tue Jan 20 16:57:48 2009
New Revision: 736181

URL: http://svn.apache.org/viewvc?rev=736181&view=rev
Log:
HIVE-25. Enable Table aliases in cluster by, distribute by and sort
by clauses (Prasad Chakka via athusoo)


Added:
    hadoop/hive/trunk/ql/src/test/queries/clientnegative/clustern1.q
    hadoop/hive/trunk/ql/src/test/queries/clientnegative/clustern2.q
    hadoop/hive/trunk/ql/src/test/queries/clientnegative/clustern3.q
    hadoop/hive/trunk/ql/src/test/queries/clientnegative/clustern4.q
    hadoop/hive/trunk/ql/src/test/results/clientnegative/clustern1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/clustern2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/clustern3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/clustern4.q.out
Modified:
    hadoop/hive/trunk/CHANGES.txt
    hadoop/hive/trunk/ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java
    hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreClient.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hadoop/hive/trunk/ql/src/test/queries/clientpositive/alter1.q
    hadoop/hive/trunk/ql/src/test/queries/clientpositive/cluster.q
    hadoop/hive/trunk/ql/src/test/queries/clientpositive/input16.q
    hadoop/hive/trunk/ql/src/test/results/clientpositive/alter1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/cluster.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input14.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input14_limit.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input17.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input18.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input20.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input21.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input5.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join0.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce5.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce6.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/sample8.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/sort.q.out
    hadoop/hive/trunk/ql/src/test/results/compiler/parse/input20.q.out
    hadoop/hive/trunk/ql/src/test/results/compiler/parse/input4.q.out
    hadoop/hive/trunk/ql/src/test/results/compiler/parse/input5.q.out
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby5.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/input3.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/input4.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/input_part1.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/join4.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/join5.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/join6.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/join7.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/join8.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample1.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/subq.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/union.q.xml

Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=736181&r1=736180&r2=736181&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Tue Jan 20 16:57:48 2009
@@ -214,3 +214,6 @@
 
     HIVE-239. Check that replace columns in alter table does not have names
     that are same as the partitioning columns (Prasad Chakka via athusoo)
+
+    HIVE-25. Enable Table aliases in cluster by, distribute by and sort
+    by clauses (Prasad Chakka via athusoo)

Modified: hadoop/hive/trunk/ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java?rev=736181&r1=736180&r2=736181&view=diff
==============================================================================
--- hadoop/hive/trunk/ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java (original)
+++ hadoop/hive/trunk/ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java Tue Jan 20 16:57:48 2009
@@ -20,6 +20,7 @@
 
 import java.io.*;
 import java.util.StringTokenizer;
+import java.util.Arrays;
 
 import org.apache.tools.ant.AntClassLoader;
 import org.apache.tools.ant.BuildException;
@@ -206,6 +207,7 @@
       }
       else {
         qFiles = inpDir.listFiles(new QFileFilter());
+        Arrays.sort(qFiles);
       }
 
       // Make sure the output directory exists, if it doesn't

Modified: hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreClient.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreClient.java?rev=736181&r1=736180&r2=736181&view=diff
==============================================================================
--- hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreClient.java (original)
+++ hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreClient.java Tue Jan 20 16:57:48 2009
@@ -329,8 +329,11 @@
    */
   public Table getTable(String dbName, String tableName) throws MetaException,
     TException, NoSuchObjectException {
-    throw new UnsupportedOperationException("getTable from a specific db " +
-    		"not supported by this metastore");
+    if(dbName.equalsIgnoreCase(MetaStoreUtils.DEFAULT_DATABASE_NAME)) {
+      Properties schema = this.getSchema(tableName);
+      return MetaStoreUtils.getTable(conf, schema);
+    }
+    throw new UnsupportedOperationException("Operation not supported in this metastore");
   }
   
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g?rev=736181&r1=736180&r2=736181&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g Tue Jan 20 16:57:48 2009
@@ -345,6 +345,12 @@
     ->                  ^(TOK_TABSORTCOLNAMEDESC Identifier)
     ;
 
+columnRefOrder
+    : tableColumn (asc=KW_ASC | desc=KW_DESC)? 
+    -> {$desc == null}? ^(TOK_TABSORTCOLNAMEASC tableColumn)
+    ->                  ^(TOK_TABSORTCOLNAMEDESC tableColumn)
+    ;
+
 columnNameType
     : colName=Identifier colType (KW_COMMENT comment=StringLiteral)?    
     -> {$comment == null}? ^(TOK_TABCOL $colName colType)
@@ -619,20 +625,20 @@
 clusterByClause
     :
     KW_CLUSTER KW_BY
-    Identifier
-    ( COMMA Identifier )* -> ^(TOK_CLUSTERBY Identifier+)
+    tableColumn
+    ( COMMA tableColumn )* -> ^(TOK_CLUSTERBY tableColumn+)
     ;
 
 distributeByClause:
     KW_DISTRIBUTE KW_BY
-    Identifier
-    ( COMMA Identifier )* -> ^(TOK_DISTRIBUTEBY Identifier+)
+    tableColumn
+    ( COMMA tableColumn )* -> ^(TOK_DISTRIBUTEBY tableColumn+)
     ;
 
 sortByClause:
     KW_SORT KW_BY
-    columnNameOrder
-    ( COMMA columnNameOrder)* -> ^(TOK_SORTBY columnNameOrder+)
+    columnRefOrder
+    ( COMMA columnRefOrder)* -> ^(TOK_SORTBY columnRefOrder+)
     ;
 
 // fun(par1, par2, par3)

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=736181&r1=736180&r2=736181&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Tue Jan 20 16:57:48 2009
@@ -953,7 +953,7 @@
  
       exprNodeColumnDesc expr = new exprNodeColumnDesc(colInfo.getType(), name);
       col_list.add(expr);
-      output.put(alias, tmp[1], new ColumnInfo(pos.toString(), colInfo.getType()));
+      output.put(tmp[0], tmp[1], new ColumnInfo(pos.toString(), colInfo.getType()));
       pos = Integer.valueOf(pos.intValue() + 1);
     }
   }
@@ -1063,13 +1063,24 @@
     }
   }
   
-  private static String getColAlias(ASTNode selExpr, String defaultName) {
+  private static String[] getColAlias(ASTNode selExpr, String defaultName) {
+    String colAlias = null;
+    String tabAlias = null;
+    String[] colRef = new String[2];
     if (selExpr.getChildCount() == 2) {
       // return zz for "xx + yy AS zz"
-      return unescapeIdentifier(selExpr.getChild(1).getText()); 
+      colAlias  = unescapeIdentifier(selExpr.getChild(1).getText());
+      colRef[0] = tabAlias;
+      colRef[1] = colAlias;
+      return colRef;
     }
 
-    ASTNode root = (ASTNode)selExpr.getChild(0);
+    ASTNode root = (ASTNode) selExpr.getChild(0);
+    if (root.getType() == HiveParser.TOK_COLREF && root.getChildCount() > 1) {
+      ASTNode tab = (ASTNode) root.getChild(0);
+      tabAlias = unescapeIdentifier(tab.getText());
+    }
+  
     while (root.getType() == HiveParser.DOT || root.getType() == HiveParser.TOK_COLREF) {
       if (root.getType() == HiveParser.TOK_COLREF && root.getChildCount() == 1) {
         root = (ASTNode) root.getChild(0);
@@ -1081,11 +1092,15 @@
     }
     if (root.getType() == HiveParser.Identifier) {
       // Return zz for "xx.zz" and "xx.yy.zz"
-      return unescapeIdentifier(root.getText());
-    } else {
+      colAlias = unescapeIdentifier(root.getText());
+    }
+    if(colAlias == null) {
       // Return defaultName if selExpr is not a simple xx.yy.zz 
-      return defaultName;
+      colAlias = defaultName;
     }
+    colRef[0] = tabAlias;
+    colRef[1] = colAlias;
+    return colRef;
   }
   
   @SuppressWarnings("nls")
@@ -1108,11 +1123,13 @@
 
       // list of the columns
       ASTNode selExpr = (ASTNode) selExprList.getChild(i);
-      String colAlias = getColAlias(selExpr, "_C" + i);
+      String[] colRef = getColAlias(selExpr, "_C" + i);
+      String colAlias = colRef[1];
+      String tabAlias = colRef[0];
       ASTNode sel = (ASTNode)selExpr.getChild(0);
       
       if (sel.getToken().getType() == HiveParser.TOK_ALLCOLREF) {
-        String tabAlias = null;
+        tabAlias = null;
         if (sel.getChildCount() == 1)
           tabAlias = unescapeIdentifier(sel.getChild(0).getText().toLowerCase());
         genColList(tabAlias, alias, sel, col_list, inputRR, pos, out_rwsch);
@@ -1126,7 +1143,7 @@
         for (int j = 0; j < cols.getChildCount(); ++j) {
           ASTNode expr = (ASTNode) cols.getChild(j);
           if (expr.getToken().getType() == HiveParser.TOK_ALLCOLREF) {
-            String tabAlias = null;
+            tabAlias = null;
             if (sel.getChildCount() == 1)
               tabAlias = unescapeIdentifier(sel.getChild(0).getText().toLowerCase());
 
@@ -1136,11 +1153,11 @@
             exprNodeDesc exp = genExprNodeDesc(qb.getMetaData(), expr, inputRR);
             col_list.add(exp);
             if (!StringUtils.isEmpty(alias) &&
-                (out_rwsch.get(alias, colAlias) != null)) {
+                (out_rwsch.get(null, colAlias) != null)) {
               throw new SemanticException(ErrorMsg.AMBIGOUS_COLUMN.getMsg(expr.getChild(1)));
             }
 
-            out_rwsch.put(alias, unescapeIdentifier(expr.getText()),
+            out_rwsch.put(tabAlias, unescapeIdentifier(expr.getText()),
                           new ColumnInfo((Integer.valueOf(pos)).toString(),
                                          exp.getTypeInfo()));
           }
@@ -1150,12 +1167,12 @@
         exprNodeDesc exp = genExprNodeDesc(qb.getMetaData(), sel, inputRR);
         col_list.add(exp);
         if (!StringUtils.isEmpty(alias) &&
-            (out_rwsch.get(alias, colAlias) != null)) {
+            (out_rwsch.get(null, colAlias) != null)) {
           throw new SemanticException(ErrorMsg.AMBIGOUS_COLUMN.getMsg(sel.getChild(1)));
         }
         // Since the as clause is lacking we just use the text representation
         // of the expression as the column name
-        out_rwsch.put(alias, colAlias,
+        out_rwsch.put(tabAlias, colAlias,
                       new ColumnInfo((Integer.valueOf(pos)).toString(),
                                      exp.getTypeInfo()));
       }
@@ -2166,12 +2183,7 @@
       int ccount = partitionExprs.getChildCount();
       for(int i=0; i<ccount; ++i) {
         ASTNode cl = (ASTNode)partitionExprs.getChild(i);
-        ColumnInfo colInfo = inputRR.get(qb.getParseInfo().getAlias(),
-                                         unescapeIdentifier(cl.getText()));
-        if (colInfo == null) {
-          throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(cl));
-        }
-        partitionCols.add(new exprNodeColumnDesc(colInfo.getType(), colInfo.getInternalName()));
+        partitionCols.add(genExprNodeDescFromColRef(cl, inputRR));
       }
     }
 
@@ -2200,13 +2212,7 @@
           order.append("+");
         }
 
-        ColumnInfo colInfo = inputRR.get(qb.getParseInfo().getAlias(),
-                                         unescapeIdentifier(cl.getText()));
-        if (colInfo == null) {
-          throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(cl));
-        }
-        
-        sortCols.add(new exprNodeColumnDesc(colInfo.getType(), colInfo.getInternalName()));
+        sortCols.add(genExprNodeDescFromColRef(cl, inputRR));
       }
     }
 
@@ -2720,6 +2726,19 @@
         }
         curr = genFileSinkPlan(dest, qb, curr);
       }
+      
+      // change curr ops row resolver's tab aliases to query alias if it exists
+      if(qb.getParseInfo().getAlias() != null) {
+        RowResolver rr = opParseCtx.get(curr).getRR();
+        RowResolver newRR = new RowResolver();
+        String alias = qb.getParseInfo().getAlias();
+        for(ColumnInfo colInfo: rr.getColumnInfos()) {
+          String name = colInfo.getInternalName();
+          String [] tmp = rr.reverseLookup(name);
+          newRR.put(alias, tmp[1], colInfo);
+        }
+        opParseCtx.get(curr).setRR(newRR);
+      }
     }
 
     LOG.debug("Created Body Plan for Query Block " + qb.getId());
@@ -3352,33 +3371,7 @@
     int tokType = expr.getType();
     switch (tokType) {
       case HiveParser.TOK_COLREF: {
-
-        String tabAlias = null;
-        String colName = null;
-        if (expr.getChildCount() != 1) {
-          tabAlias = unescapeIdentifier(expr.getChild(0).getText());
-          colName = unescapeIdentifier(expr.getChild(1).getText());
-        }
-        else {
-          colName = unescapeIdentifier(expr.getChild(0).getText());
-        }
-
-        if (colName == null) {
-          throw new SemanticException(ErrorMsg.INVALID_XPATH.getMsg(expr));
-        }
-
-        colInfo = input.get(tabAlias, colName);
-
-        if (colInfo == null && input.getIsExprResolver()) {
-          throw new SemanticException(ErrorMsg.NON_KEY_EXPR_IN_GROUPBY.getMsg(expr));
-        }         
-        else if (tabAlias != null && !input.hasTableAlias(tabAlias)) {
-          throw new SemanticException(ErrorMsg.INVALID_TABLE_ALIAS.getMsg(expr.getChild(0)));
-        } else if (colInfo == null) {
-          throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(tabAlias == null? expr.getChild(0) : expr.getChild(1)));
-        }
-
-        desc = new exprNodeColumnDesc(colInfo.getType(), colInfo.getInternalName());
+        desc = genExprNodeDescFromColRef(expr, input);
         break;
       }
   
@@ -3401,6 +3394,49 @@
     return desc;
   }
 
+  /**
+   * Generates expression node from a TOK_COLREF AST Node
+   * @param expr ANTLR AST node
+   * @param input row resolver for this col reference
+   * @return exprNodeDesc for the column reference (never null; a SemanticException is thrown if the node is not a TOK_COLREF)
+   * @throws SemanticException
+   */
+  private exprNodeDesc genExprNodeDescFromColRef(ASTNode expr, RowResolver input)
+      throws SemanticException {
+    if(expr.getType() != HiveParser.TOK_COLREF) {
+      throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(expr));
+    }
+    exprNodeDesc desc;
+    ColumnInfo colInfo;
+    String tabAlias = null;
+    String colName = null;
+    if (expr.getChildCount() != 1) {
+      tabAlias = unescapeIdentifier(expr.getChild(0).getText());
+      colName = unescapeIdentifier(expr.getChild(1).getText());
+    }
+    else {
+      colName = unescapeIdentifier(expr.getChild(0).getText());
+    }
+
+    if (colName == null) {
+      throw new SemanticException(ErrorMsg.INVALID_XPATH.getMsg(expr));
+    }
+
+    colInfo = input.get(tabAlias, colName);
+
+    if (colInfo == null && input.getIsExprResolver()) {
+      throw new SemanticException(ErrorMsg.NON_KEY_EXPR_IN_GROUPBY.getMsg(expr));
+    }         
+    else if (tabAlias != null && !input.hasTableAlias(tabAlias)) {
+      throw new SemanticException(ErrorMsg.INVALID_TABLE_ALIAS.getMsg(expr.getChild(0)));
+    } else if (colInfo == null) {
+      throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(tabAlias == null? expr.getChild(0) : expr.getChild(1)));
+    }
+
+    desc = new exprNodeColumnDesc(colInfo.getType(), colInfo.getInternalName());
+    return desc;
+  }
+
   static HashMap<Integer, String> specialUnaryOperatorTextHashMap;
   static HashMap<Integer, String> specialFunctionTextHashMap;
   static HashMap<Integer, String> conversionFunctionTextHashMap;

Added: hadoop/hive/trunk/ql/src/test/queries/clientnegative/clustern1.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientnegative/clustern1.q?rev=736181&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientnegative/clustern1.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientnegative/clustern1.q Tue Jan 20 16:57:48 2009
@@ -0,0 +1,2 @@
+EXPLAIN
+SELECT x.key, x.value as key FROM SRC x CLUSTER BY key;

Added: hadoop/hive/trunk/ql/src/test/queries/clientnegative/clustern2.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientnegative/clustern2.q?rev=736181&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientnegative/clustern2.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientnegative/clustern2.q Tue Jan 20 16:57:48 2009
@@ -0,0 +1,3 @@
+EXPLAIN
+SELECT x.key, x.value as v1, y.*  FROM SRC x JOIN SRC y ON (x.key = y.key) CLUSTER BY key;
+

Added: hadoop/hive/trunk/ql/src/test/queries/clientnegative/clustern3.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientnegative/clustern3.q?rev=736181&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientnegative/clustern3.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientnegative/clustern3.q Tue Jan 20 16:57:48 2009
@@ -0,0 +1,2 @@
+EXPLAIN
+SELECT x.key as k1, x.value FROM SRC x CLUSTER BY x.key;

Added: hadoop/hive/trunk/ql/src/test/queries/clientnegative/clustern4.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientnegative/clustern4.q?rev=736181&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientnegative/clustern4.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientnegative/clustern4.q Tue Jan 20 16:57:48 2009
@@ -0,0 +1,2 @@
+EXPLAIN
+SELECT x.key as k1, x.value FROM SRC x CLUSTER BY key;

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/alter1.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/alter1.q?rev=736181&r1=736180&r2=736181&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/alter1.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/alter1.q Tue Jan 20 16:57:48 2009
@@ -17,4 +17,7 @@
 alter table alter1 set serde 'org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe';
 describe extended alter1;
 
+alter table alter1 replace columns (a int, b int, c string);
+describe alter1;
+
 drop table alter1;

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/cluster.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/cluster.q?rev=736181&r1=736180&r2=736181&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/cluster.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/cluster.q Tue Jan 20 16:57:48 2009
@@ -1,4 +1,65 @@
 EXPLAIN
-SELECT x.* FROM SRC x CLUSTER BY key;
+SELECT * FROM SRC x where x.key = 10 CLUSTER BY x.key;
+SELECT * FROM SRC x where x.key = 10 CLUSTER BY x.key;
 
-SELECT x.* FROM SRC x CLUSTER BY key;
+EXPLAIN
+SELECT * FROM SRC x  where x.key = 20 CLUSTER BY key ;
+SELECT * FROM SRC x where x.key = 20 CLUSTER BY key ;
+
+EXPLAIN
+SELECT x.* FROM SRC x where x.key = 20 CLUSTER BY key;
+SELECT x.* FROM SRC x where x.key = 20 CLUSTER BY key;
+
+EXPLAIN
+SELECT x.*  FROM SRC x where x.key = 20 CLUSTER BY x.key;
+SELECT x.*  FROM SRC x where x.key = 20 CLUSTER BY x.key;
+
+EXPLAIN
+SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY key ;
+SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY key ;
+
+EXPLAIN
+SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY x.key;
+SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY x.key;
+
+EXPLAIN
+SELECT x.key, x.value as v1  FROM SRC x where x.key = 20 CLUSTER BY v1;
+SELECT x.key, x.value as v1  FROM SRC x where x.key = 20 CLUSTER BY v1;
+
+EXPLAIN
+SELECT y.* from (SELECT x.* FROM SRC x CLUSTER BY x.key) y where y.key = 20;
+SELECT y.* from (SELECT x.* FROM SRC x CLUSTER BY x.key) y where y.key = 20;
+
+
+EXPLAIN 
+SELECT x.key, x.value as v1, y.key  FROM SRC x JOIN SRC y ON (x.key = y.key)  where x.key = 20 CLUSTER BY v1;;
+SELECT x.key, x.value as v1, y.key  FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1;
+
+EXPLAIN 
+SELECT x.key, x.value as v1, y.*  FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1;
+SELECT x.key, x.value as v1, y.*  FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1;
+
+EXPLAIN
+SELECT x.key, x.value as v1, y.*  FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY x.key ;
+SELECT x.key, x.value as v1, y.*  FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY x.key ;
+
+EXPLAIN
+SELECT x.key, x.value as v1, y.key as yk  FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY key ;
+SELECT x.key, x.value as v1, y.key as yk  FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY key ;
+
+EXPLAIN
+SELECT unioninput.*
+FROM (
+  FROM src select src.key, src.value WHERE src.key < 100
+  UNION ALL
+  FROM src SELECT src.* WHERE src.key > 100
+) unioninput
+CLUSTER BY unioninput.key;
+
+SELECT unioninput.*
+FROM (
+  FROM src select src.key, src.value WHERE src.key < 100
+  UNION ALL
+  FROM src SELECT src.* WHERE src.key > 100
+) unioninput
+CLUSTER BY unioninput.key;

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input16.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input16.q?rev=736181&r1=736180&r2=736181&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input16.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input16.q Tue Jan 20 16:57:48 2009
@@ -1,4 +1,5 @@
 -- TestSerDe is a user defined serde where the default delimiter is Ctrl-B
+DROP TABLE INPUT16;
 CREATE TABLE INPUT16(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' STORED AS TEXTFILE;
 LOAD DATA LOCAL INPATH '../data/files/kv1_cb.txt' INTO TABLE INPUT16;
 SELECT INPUT16.VALUE, INPUT16.KEY FROM INPUT16;

Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/clustern1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/clustern1.q.out?rev=736181&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/clustern1.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/clustern1.q.out Tue Jan 20 16:57:48 2009
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: Column key Found in more than One Tables/Subqueries

Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/clustern2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/clustern2.q.out?rev=736181&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/clustern2.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/clustern2.q.out Tue Jan 20 16:57:48 2009
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: Column key Found in more than One Tables/Subqueries

Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/clustern3.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/clustern3.q.out?rev=736181&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/clustern3.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/clustern3.q.out Tue Jan 20 16:57:48 2009
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: line 2:52 Invalid Column Reference key

Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/clustern4.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/clustern4.q.out?rev=736181&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/clustern4.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/clustern4.q.out Tue Jan 20 16:57:48 2009
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: line 2:50 Invalid Column Reference key

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/alter1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/alter1.q.out?rev=736181&r1=736180&r2=736181&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/alter1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/alter1.q.out Tue Jan 20 16:57:48 2009
@@ -1,28 +1,31 @@
 a	int
 b	int
 Detailed Table Information:
-Table(tableName:alter1,dbName:default,owner:njain,createTime:1225994182,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{})
+Table(tableName:alter1,dbName:default,owner:pchakka,createTime:1232158150,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/data/users/pchakka/workspace/oshive/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe,parameters:{serialization.format=org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{})
 a	int
 b	int
 Detailed Table Information:
-Table(tableName:alter1,dbName:default,owner:njain,createTime:1225994182,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{last_modified_by=njain,c=3,last_modified_time=1225994182,a=1})
+Table(tableName:alter1,dbName:default,owner:pchakka,createTime:1232158150,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/data/users/pchakka/workspace/oshive/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe,parameters:{serialization.format=org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{last_modified_by=pchakka,c=3,last_modified_time=1232158150,a=1})
 a	int
 b	int
 Detailed Table Information:
-Table(tableName:alter1,dbName:default,owner:njain,createTime:1225994182,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=njain,c=4,last_modified_time=1225994182,a=1})
+Table(tableName:alter1,dbName:default,owner:pchakka,createTime:1232158150,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/data/users/pchakka/workspace/oshive/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe,parameters:{serialization.format=org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=pchakka,c=4,last_modified_time=1232158150,a=1})
 a	int
 b	int
 Detailed Table Information:
-Table(tableName:alter1,dbName:default,owner:njain,createTime:1225994182,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe,parameters:{s1=9,serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=njain,c=4,last_modified_time=1225994182,a=1})
+Table(tableName:alter1,dbName:default,owner:pchakka,createTime:1232158150,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/data/users/pchakka/workspace/oshive/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe,parameters:{s1=9,serialization.format=org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=pchakka,c=4,last_modified_time=1232158150,a=1})
 a	int
 b	int
 Detailed Table Information:
-Table(tableName:alter1,dbName:default,owner:njain,createTime:1225994182,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe,parameters:{s2=20,s1=10,serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=njain,c=4,last_modified_time=1225994182,a=1})
+Table(tableName:alter1,dbName:default,owner:pchakka,createTime:1232158150,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/data/users/pchakka/workspace/oshive/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe,parameters:{s2=20,s1=10,serialization.format=org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=pchakka,c=4,last_modified_time=1232158150,a=1})
 a	string	'from deserializer'
 b	string	'from deserializer'
 Detailed Table Information:
-Table(tableName:alter1,dbName:default,owner:njain,createTime:1225994182,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:string,comment:from deserializer), FieldSchema(name:b,type:string,comment:from deserializer)],location:file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.TestSerDe,parameters:{s2=20,s1=9,serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=njain,c=4,last_modified_time=1225994182,a=1})
+Table(tableName:alter1,dbName:default,owner:pchakka,createTime:1232158150,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:string,comment:from deserializer), FieldSchema(name:b,type:string,comment:from deserializer)],location:file:/data/users/pchakka/workspace/oshive/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.TestSerDe,parameters:{s2=20,s1=9,serialization.format=org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=pchakka,c=4,last_modified_time=1232158150,a=1})
 a	string	'from deserializer'
 b	string	'from deserializer'
 Detailed Table Information:
-Table(tableName:alter1,dbName:default,owner:njain,createTime:1225994182,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:string,comment:from deserializer), FieldSchema(name:b,type:string,comment:from deserializer)],location:file:/home/njain/workspace/hadoop-0.17/build/contrib/hive/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe,parameters:{s2=20,s1=9,serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=njain,c=4,last_modified_time=1225994182,a=1})
+Table(tableName:alter1,dbName:default,owner:pchakka,createTime:1232158150,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:string,comment:from deserializer), FieldSchema(name:b,type:string,comment:from deserializer)],location:file:/data/users/pchakka/workspace/oshive/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe,parameters:{s2=20,s1=9,serialization.format=org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=pchakka,c=4,last_modified_time=1232158150,a=1})
+a	int
+b	int
+c	string

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/cluster.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/cluster.q.out?rev=736181&r1=736180&r2=736181&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/cluster.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/cluster.q.out Tue Jan 20 16:57:48 2009
@@ -1,5 +1,5 @@
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_TABREF SRC x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF x))) (TOK_CLUSTERBY key)))
+  (TOK_QUERY (TOK_FROM (TOK_TABREF SRC x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (= (TOK_COLREF x key) 10)) (TOK_CLUSTERBY (TOK_COLREF x key))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -10,6 +10,356 @@
     Map Reduce
       Alias -> Map Operator Tree:
         x 
+            Filter Operator
+              predicate:
+                  expr: (key = 10)
+                  type: boolean
+              Select Operator
+                expressions:
+                      expr: key
+                      type: string
+                      expr: value
+                      type: string
+                Reduce Output Operator
+                  key expressions:
+                        expr: 0
+                        type: string
+                  sort order: +
+                  Map-reduce partition columns:
+                        expr: 0
+                        type: string
+                  tag: -1
+                  value expressions:
+                        expr: 0
+                        type: string
+                        expr: 1
+                        type: string
+      Reduce Operator Tree:
+        Extract
+          File Output Operator
+            compressed: false
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+10	val_10
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF SRC x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (= (TOK_COLREF x key) 20)) (TOK_CLUSTERBY (TOK_COLREF key))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        x 
+            Filter Operator
+              predicate:
+                  expr: (key = 20)
+                  type: boolean
+              Select Operator
+                expressions:
+                      expr: key
+                      type: string
+                      expr: value
+                      type: string
+                Reduce Output Operator
+                  key expressions:
+                        expr: 0
+                        type: string
+                  sort order: +
+                  Map-reduce partition columns:
+                        expr: 0
+                        type: string
+                  tag: -1
+                  value expressions:
+                        expr: 0
+                        type: string
+                        expr: 1
+                        type: string
+      Reduce Operator Tree:
+        Extract
+          File Output Operator
+            compressed: false
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+20	val_20
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF SRC x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF x))) (TOK_WHERE (= (TOK_COLREF x key) 20)) (TOK_CLUSTERBY (TOK_COLREF key))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        x 
+            Filter Operator
+              predicate:
+                  expr: (key = 20)
+                  type: boolean
+              Select Operator
+                expressions:
+                      expr: key
+                      type: string
+                      expr: value
+                      type: string
+                Reduce Output Operator
+                  key expressions:
+                        expr: 0
+                        type: string
+                  sort order: +
+                  Map-reduce partition columns:
+                        expr: 0
+                        type: string
+                  tag: -1
+                  value expressions:
+                        expr: 0
+                        type: string
+                        expr: 1
+                        type: string
+      Reduce Operator Tree:
+        Extract
+          File Output Operator
+            compressed: false
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+20	val_20
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF SRC x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF x))) (TOK_WHERE (= (TOK_COLREF x key) 20)) (TOK_CLUSTERBY (TOK_COLREF x key))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        x 
+            Filter Operator
+              predicate:
+                  expr: (key = 20)
+                  type: boolean
+              Select Operator
+                expressions:
+                      expr: key
+                      type: string
+                      expr: value
+                      type: string
+                Reduce Output Operator
+                  key expressions:
+                        expr: 0
+                        type: string
+                  sort order: +
+                  Map-reduce partition columns:
+                        expr: 0
+                        type: string
+                  tag: -1
+                  value expressions:
+                        expr: 0
+                        type: string
+                        expr: 1
+                        type: string
+      Reduce Operator Tree:
+        Extract
+          File Output Operator
+            compressed: false
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+20	val_20
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF SRC x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF x key)) (TOK_SELEXPR (TOK_COLREF x value) v1)) (TOK_WHERE (= (TOK_COLREF x key) 20)) (TOK_CLUSTERBY (TOK_COLREF key))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        x 
+            Filter Operator
+              predicate:
+                  expr: (key = 20)
+                  type: boolean
+              Select Operator
+                expressions:
+                      expr: key
+                      type: string
+                      expr: value
+                      type: string
+                Reduce Output Operator
+                  key expressions:
+                        expr: 0
+                        type: string
+                  sort order: +
+                  Map-reduce partition columns:
+                        expr: 0
+                        type: string
+                  tag: -1
+                  value expressions:
+                        expr: 0
+                        type: string
+                        expr: 1
+                        type: string
+      Reduce Operator Tree:
+        Extract
+          File Output Operator
+            compressed: false
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+20	val_20
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF SRC x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF x key)) (TOK_SELEXPR (TOK_COLREF x value) v1)) (TOK_WHERE (= (TOK_COLREF x key) 20)) (TOK_CLUSTERBY (TOK_COLREF x key))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        x 
+            Filter Operator
+              predicate:
+                  expr: (key = 20)
+                  type: boolean
+              Select Operator
+                expressions:
+                      expr: key
+                      type: string
+                      expr: value
+                      type: string
+                Reduce Output Operator
+                  key expressions:
+                        expr: 0
+                        type: string
+                  sort order: +
+                  Map-reduce partition columns:
+                        expr: 0
+                        type: string
+                  tag: -1
+                  value expressions:
+                        expr: 0
+                        type: string
+                        expr: 1
+                        type: string
+      Reduce Operator Tree:
+        Extract
+          File Output Operator
+            compressed: false
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+20	val_20
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF SRC x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF x key)) (TOK_SELEXPR (TOK_COLREF x value) v1)) (TOK_WHERE (= (TOK_COLREF x key) 20)) (TOK_CLUSTERBY (TOK_COLREF v1))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        x 
+            Filter Operator
+              predicate:
+                  expr: (key = 20)
+                  type: boolean
+              Select Operator
+                expressions:
+                      expr: key
+                      type: string
+                      expr: value
+                      type: string
+                Reduce Output Operator
+                  key expressions:
+                        expr: 1
+                        type: string
+                  sort order: +
+                  Map-reduce partition columns:
+                        expr: 1
+                        type: string
+                  tag: -1
+                  value expressions:
+                        expr: 0
+                        type: string
+                        expr: 1
+                        type: string
+      Reduce Operator Tree:
+        Extract
+          File Output Operator
+            compressed: false
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+20	val_20
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF SRC x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF x))) (TOK_CLUSTERBY (TOK_COLREF x key)))) y)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF y))) (TOK_WHERE (= (TOK_COLREF y key) 20))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        y:x 
             Select Operator
               expressions:
                     expr: key
@@ -32,6 +382,523 @@
                       type: string
       Reduce Operator Tree:
         Extract
+          Filter Operator
+            predicate:
+                expr: (0 = 20)
+                type: boolean
+            Select Operator
+              expressions:
+                    expr: 0
+                    type: string
+                    expr: 1
+                    type: string
+              File Output Operator
+                compressed: false
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+20	val_20
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF SRC x) (TOK_TABREF SRC y) (= (TOK_COLREF x key) (TOK_COLREF y key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF x key)) (TOK_SELEXPR (TOK_COLREF x value) v1) (TOK_SELEXPR (TOK_COLREF y key))) (TOK_WHERE (= (TOK_COLREF x key) 20)) (TOK_CLUSTERBY (TOK_COLREF v1))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        y 
+            Select Operator
+              expressions:
+                    expr: key
+                    type: string
+              Reduce Output Operator
+                key expressions:
+                      expr: 0
+                      type: string
+                sort order: +
+                Map-reduce partition columns:
+                      expr: 0
+                      type: string
+                tag: 1
+                value expressions:
+                      expr: 0
+                      type: string
+        x 
+            Reduce Output Operator
+              key expressions:
+                    expr: key
+                    type: string
+              sort order: +
+              Map-reduce partition columns:
+                    expr: key
+                    type: string
+              tag: 0
+              value expressions:
+                    expr: key
+                    type: string
+                    expr: value
+                    type: string
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          condition expressions:
+            0 {VALUE.0} {VALUE.1}
+            1 {VALUE.0}
+          Filter Operator
+            predicate:
+                expr: (0 = 20)
+                type: boolean
+            Select Operator
+              expressions:
+                    expr: 0
+                    type: string
+                    expr: 1
+                    type: string
+                    expr: 2
+                    type: string
+              File Output Operator
+                compressed: false
+                table:
+                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                    output format: org.apache.hadoop.mapred.SequenceFileOutputFormat
+                    name: binary_table
+
+  Stage: Stage-2
+    Map Reduce
+      Alias -> Map Operator Tree:
+        /data/users/pchakka/workspace/oshive/build/ql/tmp/4095681/630596716.10002 
+          Reduce Output Operator
+            key expressions:
+                  expr: 1
+                  type: string
+            sort order: +
+            Map-reduce partition columns:
+                  expr: 1
+                  type: string
+            tag: -1
+            value expressions:
+                  expr: 0
+                  type: string
+                  expr: 1
+                  type: string
+                  expr: 2
+                  type: string
+      Reduce Operator Tree:
+        Extract
+          File Output Operator
+            compressed: false
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+20	val_20	20
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF SRC x) (TOK_TABREF SRC y) (= (TOK_COLREF x key) (TOK_COLREF y key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF x key)) (TOK_SELEXPR (TOK_COLREF x value) v1) (TOK_SELEXPR (TOK_ALLCOLREF y))) (TOK_WHERE (= (TOK_COLREF x key) 20)) (TOK_CLUSTERBY (TOK_COLREF v1))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        y 
+            Reduce Output Operator
+              key expressions:
+                    expr: key
+                    type: string
+              sort order: +
+              Map-reduce partition columns:
+                    expr: key
+                    type: string
+              tag: 1
+              value expressions:
+                    expr: key
+                    type: string
+                    expr: value
+                    type: string
+        x 
+            Reduce Output Operator
+              key expressions:
+                    expr: key
+                    type: string
+              sort order: +
+              Map-reduce partition columns:
+                    expr: key
+                    type: string
+              tag: 0
+              value expressions:
+                    expr: key
+                    type: string
+                    expr: value
+                    type: string
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          condition expressions:
+            0 {VALUE.0} {VALUE.1}
+            1 {VALUE.0} {VALUE.1}
+          Filter Operator
+            predicate:
+                expr: (0 = 20)
+                type: boolean
+            Select Operator
+              expressions:
+                    expr: 0
+                    type: string
+                    expr: 1
+                    type: string
+                    expr: 2
+                    type: string
+                    expr: 3
+                    type: string
+              File Output Operator
+                compressed: false
+                table:
+                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                    output format: org.apache.hadoop.mapred.SequenceFileOutputFormat
+                    name: binary_table
+
+  Stage: Stage-2
+    Map Reduce
+      Alias -> Map Operator Tree:
+        /data/users/pchakka/workspace/oshive/build/ql/tmp/462292647/163669153.10002 
+          Reduce Output Operator
+            key expressions:
+                  expr: 1
+                  type: string
+            sort order: +
+            Map-reduce partition columns:
+                  expr: 1
+                  type: string
+            tag: -1
+            value expressions:
+                  expr: 0
+                  type: string
+                  expr: 1
+                  type: string
+                  expr: 2
+                  type: string
+                  expr: 3
+                  type: string
+      Reduce Operator Tree:
+        Extract
+          File Output Operator
+            compressed: false
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+20	val_20	20	val_20
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF SRC x) (TOK_TABREF SRC y) (= (TOK_COLREF x key) (TOK_COLREF y key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF x key)) (TOK_SELEXPR (TOK_COLREF x value) v1) (TOK_SELEXPR (TOK_ALLCOLREF y))) (TOK_WHERE (= (TOK_COLREF x key) 20)) (TOK_CLUSTERBY (TOK_COLREF x key))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        y 
+            Reduce Output Operator
+              key expressions:
+                    expr: key
+                    type: string
+              sort order: +
+              Map-reduce partition columns:
+                    expr: key
+                    type: string
+              tag: 1
+              value expressions:
+                    expr: key
+                    type: string
+                    expr: value
+                    type: string
+        x 
+            Reduce Output Operator
+              key expressions:
+                    expr: key
+                    type: string
+              sort order: +
+              Map-reduce partition columns:
+                    expr: key
+                    type: string
+              tag: 0
+              value expressions:
+                    expr: key
+                    type: string
+                    expr: value
+                    type: string
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          condition expressions:
+            0 {VALUE.0} {VALUE.1}
+            1 {VALUE.0} {VALUE.1}
+          Filter Operator
+            predicate:
+                expr: (0 = 20)
+                type: boolean
+            Select Operator
+              expressions:
+                    expr: 0
+                    type: string
+                    expr: 1
+                    type: string
+                    expr: 2
+                    type: string
+                    expr: 3
+                    type: string
+              File Output Operator
+                compressed: false
+                table:
+                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                    output format: org.apache.hadoop.mapred.SequenceFileOutputFormat
+                    name: binary_table
+
+  Stage: Stage-2
+    Map Reduce
+      Alias -> Map Operator Tree:
+        /data/users/pchakka/workspace/oshive/build/ql/tmp/72781939/1364102870.10002 
+          Reduce Output Operator
+            key expressions:
+                  expr: 0
+                  type: string
+            sort order: +
+            Map-reduce partition columns:
+                  expr: 0
+                  type: string
+            tag: -1
+            value expressions:
+                  expr: 0
+                  type: string
+                  expr: 1
+                  type: string
+                  expr: 2
+                  type: string
+                  expr: 3
+                  type: string
+      Reduce Operator Tree:
+        Extract
+          File Output Operator
+            compressed: false
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+20	val_20	20	val_20
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF SRC x) (TOK_TABREF SRC y) (= (TOK_COLREF x key) (TOK_COLREF y key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF x key)) (TOK_SELEXPR (TOK_COLREF x value) v1) (TOK_SELEXPR (TOK_COLREF y key) yk)) (TOK_WHERE (= (TOK_COLREF x key) 20)) (TOK_CLUSTERBY (TOK_COLREF key))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        y 
+            Select Operator
+              expressions:
+                    expr: key
+                    type: string
+              Reduce Output Operator
+                key expressions:
+                      expr: 0
+                      type: string
+                sort order: +
+                Map-reduce partition columns:
+                      expr: 0
+                      type: string
+                tag: 1
+                value expressions:
+                      expr: 0
+                      type: string
+        x 
+            Reduce Output Operator
+              key expressions:
+                    expr: key
+                    type: string
+              sort order: +
+              Map-reduce partition columns:
+                    expr: key
+                    type: string
+              tag: 0
+              value expressions:
+                    expr: key
+                    type: string
+                    expr: value
+                    type: string
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          condition expressions:
+            0 {VALUE.0} {VALUE.1}
+            1 {VALUE.0}
+          Filter Operator
+            predicate:
+                expr: (0 = 20)
+                type: boolean
+            Select Operator
+              expressions:
+                    expr: 0
+                    type: string
+                    expr: 1
+                    type: string
+                    expr: 2
+                    type: string
+              File Output Operator
+                compressed: false
+                table:
+                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                    output format: org.apache.hadoop.mapred.SequenceFileOutputFormat
+                    name: binary_table
+
+  Stage: Stage-2
+    Map Reduce
+      Alias -> Map Operator Tree:
+        /data/users/pchakka/workspace/oshive/build/ql/tmp/808635/840293573.10002 
+          Reduce Output Operator
+            key expressions:
+                  expr: 0
+                  type: string
+            sort order: +
+            Map-reduce partition columns:
+                  expr: 0
+                  type: string
+            tag: -1
+            value expressions:
+                  expr: 0
+                  type: string
+                  expr: 1
+                  type: string
+                  expr: 2
+                  type: string
+      Reduce Operator Tree:
+        Extract
+          File Output Operator
+            compressed: false
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+20	val_20	20
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src key)) (TOK_SELEXPR (TOK_COLREF src value))) (TOK_WHERE (< (TOK_COLREF src key) 100)))) (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF src))) (TOK_WHERE (> (TOK_COLREF src key) 100))))) unioninput)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF unioninput))) (TOK_CLUSTERBY (TOK_COLREF unioninput key))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        null-subquery1:unioninput-subquery1:src 
+            Filter Operator
+              predicate:
+                  expr: (key < 100)
+                  type: boolean
+              Select Operator
+                expressions:
+                      expr: key
+                      type: string
+                      expr: value
+                      type: string
+                  Select Operator
+                    expressions:
+                          expr: 0
+                          type: string
+                          expr: 1
+                          type: string
+                    Reduce Output Operator
+                      key expressions:
+                            expr: 0
+                            type: string
+                      sort order: +
+                      Map-reduce partition columns:
+                            expr: 0
+                            type: string
+                      tag: -1
+                      value expressions:
+                            expr: 0
+                            type: string
+                            expr: 1
+                            type: string
+        null-subquery2:unioninput-subquery2:src 
+            Filter Operator
+              predicate:
+                  expr: (key > 100)
+                  type: boolean
+              Select Operator
+                expressions:
+                      expr: key
+                      type: string
+                      expr: value
+                      type: string
+                  Select Operator
+                    expressions:
+                          expr: 0
+                          type: string
+                          expr: 1
+                          type: string
+                    Reduce Output Operator
+                      key expressions:
+                            expr: 0
+                            type: string
+                      sort order: +
+                      Map-reduce partition columns:
+                            expr: 0
+                            type: string
+                      tag: -1
+                      value expressions:
+                            expr: 0
+                            type: string
+                            expr: 1
+                            type: string
+      Reduce Operator Tree:
+        Extract
           File Output Operator
             compressed: false
             table:
@@ -47,8 +914,6 @@
 0	val_0
 0	val_0
 10	val_10
-100	val_100
-100	val_100
 103	val_103
 103	val_103
 104	val_104

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input14.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input14.q.out?rev=736181&r1=736180&r2=736181&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input14.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input14.q.out Tue Jan 20 16:57:48 2009
@@ -1,5 +1,5 @@
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (TOK_COLREF src key) (TOK_COLREF src value)) '/bin/cat' (TOK_ALIASLIST tkey tvalue)))) (TOK_CLUSTERBY tkey))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF tmap tkey)) (TOK_SELEXPR (TOK_COLREF tmap tvalue))) (TOK_WHERE (< (TOK_COLREF tmap tkey) 100))))
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (TOK_COLREF src key) (TOK_COLREF src value)) '/bin/cat' (TOK_ALIASLIST tkey tvalue)))) (TOK_CLUSTERBY (TOK_COLREF tkey)))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF tmap tkey)) (TOK_SELEXPR (TOK_COLREF tmap tvalue))) (TOK_WHERE (< (TOK_COLREF tmap tkey) 100))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input14_limit.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input14_limit.q.out?rev=736181&r1=736180&r2=736181&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input14_limit.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input14_limit.q.out Tue Jan 20 16:57:48 2009
@@ -1,5 +1,5 @@
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (TOK_COLREF src key) (TOK_COLREF src value)) '/bin/cat' (TOK_ALIASLIST tkey tvalue)))) (TOK_CLUSTERBY tkey) (TOK_LIMIT 20))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF tmap tkey)) (TOK_SELEXPR (TOK_COLREF tmap tvalue))) (TOK_WHERE (< (TOK_COLREF tmap tkey) 100))))
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (TOK_COLREF src key) (TOK_COLREF src value)) '/bin/cat' (TOK_ALIASLIST tkey tvalue)))) (TOK_CLUSTERBY (TOK_COLREF tkey)) (TOK_LIMIT 20))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF tmap tkey)) (TOK_SELEXPR (TOK_COLREF tmap tvalue))) (TOK_WHERE (< (TOK_COLREF tmap tkey) 100))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -49,7 +49,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        /tmp/hive-zshao/2396737/195622561.10001 
+        /data/users/pchakka/workspace/oshive/build/ql/tmp/557187098/238412176.10001 
           Reduce Output Operator
             key expressions:
                   expr: 0

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input17.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input17.q.out?rev=736181&r1=736180&r2=736181&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input17.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input17.q.out Tue Jan 20 16:57:48 2009
@@ -1,5 +1,5 @@
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (+ (TOK_COLREF src_thrift aint) ([ (TOK_COLREF src_thrift lint) 0)) ([ (TOK_COLREF src_thrift lintstring) 0)) '/bin/cat' (TOK_ALIASLIST tkey tvalue)))) (TOK_CLUSTERBY tkey))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF tmap tkey)) (TOK_SELEXPR (TOK_COLREF tmap tvalue)))))
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (+ (TOK_COLREF src_thrift aint) ([ (TOK_COLREF src_thrift lint) 0)) ([ (TOK_COLREF src_thrift lintstring) 0)) '/bin/cat' (TOK_ALIASLIST tkey tvalue)))) (TOK_CLUSTERBY (TOK_COLREF tkey)))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF tmap tkey)) (TOK_SELEXPR (TOK_COLREF tmap tvalue)))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input18.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input18.q.out?rev=736181&r1=736180&r2=736181&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input18.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input18.q.out Tue Jan 20 16:57:48 2009
@@ -1,5 +1,5 @@
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (TOK_COLREF src key) (TOK_COLREF src value) (+ 1 2) (+ 3 4)) '/bin/cat'))) (TOK_CLUSTERBY key))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF tmap key)) (TOK_SELEXPR (TOK_FUNCTION regexp_replace (TOK_COLREF tmap value) '\t' '+'))) (TOK_WHERE (< (TOK_COLREF tmap key) 100))))
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (TOK_COLREF src key) (TOK_COLREF src value) (+ 1 2) (+ 3 4)) '/bin/cat'))) (TOK_CLUSTERBY (TOK_COLREF key)))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF tmap key)) (TOK_SELEXPR (TOK_FUNCTION regexp_replace (TOK_COLREF tmap value) '\t' '+'))) (TOK_WHERE (< (TOK_COLREF tmap key) 100))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input20.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input20.q.out?rev=736181&r1=736180&r2=736181&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input20.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input20.q.out Tue Jan 20 16:57:48 2009
@@ -1,5 +1,5 @@
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (% (TOK_COLREF src key) 2) (% (TOK_COLREF src key) 5)) 'cat'))) (TOK_CLUSTERBY key))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (TOK_COLREF tmap key) (TOK_COLREF tmap value)) 'uniq -c | sed "s@^ *@@" | sed "s@\t@_@" | sed "s@ @\t@"' (TOK_ALIASLIST key value))))))
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (% (TOK_COLREF src key) 2) (% (TOK_COLREF src key) 5)) 'cat'))) (TOK_CLUSTERBY (TOK_COLREF key)))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (TOK_COLREF tmap key) (TOK_COLREF tmap value)) 'uniq -c | sed "s@^ *@@" | sed "s@\t@_@" | sed "s@ @\t@"' (TOK_ALIASLIST key value))))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input21.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input21.q.out?rev=736181&r1=736180&r2=736181&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input21.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input21.q.out Tue Jan 20 16:57:48 2009
@@ -1,5 +1,5 @@
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_TABREF src_null)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_DISTRIBUTEBY c) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC d))))
+  (TOK_QUERY (TOK_FROM (TOK_TABREF src_null)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_DISTRIBUTEBY (TOK_COLREF c)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_COLREF d)))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input5.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input5.q.out?rev=736181&r1=736180&r2=736181&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input5.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input5.q.out Tue Jan 20 16:57:48 2009
@@ -1,5 +1,5 @@
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (TOK_COLREF src_thrift lint) (TOK_COLREF src_thrift lintstring)) '/bin/cat' (TOK_ALIASLIST tkey tvalue)))) (TOK_CLUSTERBY tkey))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF tmap tkey)) (TOK_SELEXPR (TOK_COLREF tmap tvalue)))))
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (TOK_COLREF src_thrift lint) (TOK_COLREF src_thrift lintstring)) '/bin/cat' (TOK_ALIASLIST tkey tvalue)))) (TOK_CLUSTERBY (TOK_COLREF tkey)))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF tmap tkey)) (TOK_SELEXPR (TOK_COLREF tmap tvalue)))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/join0.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/join0.q.out?rev=736181&r1=736180&r2=736181&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/join0.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/join0.q.out Tue Jan 20 16:57:48 2009
@@ -1,5 +1,5 @@
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (< (TOK_COLREF src key) 10)))) src1) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (< (TOK_COLREF src key) 10)))) src2))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src1 key) k1) (TOK_SELEXPR (TOK_COLREF src1 value) v1) (TOK_SELEXPR (TOK_COLREF src2 key) k2) (TOK_SELEXPR (TOK_COLREF src2 value) v2)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC k1) (TOK_TABSORTCOLNAMEASC v1) (TOK_TABSORTCOLNAMEASC k2) (TOK_TABSORTCOLNAMEASC v2))))
+  (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (< (TOK_COLREF src key) 10)))) src1) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (< (TOK_COLREF src key) 10)))) src2))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src1 key) k1) (TOK_SELEXPR (TOK_COLREF src1 value) v1) (TOK_SELEXPR (TOK_COLREF src2 key) k2) (TOK_SELEXPR (TOK_COLREF src2 value) v2)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_COLREF k1)) (TOK_TABSORTCOLNAMEASC (TOK_COLREF v1)) (TOK_TABSORTCOLNAMEASC (TOK_COLREF k2)) (TOK_TABSORTCOLNAMEASC (TOK_COLREF v2)))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -75,7 +75,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        /tmp/hive-zshao/909665613/33076823.10002 
+        /data/users/pchakka/workspace/oshive/build/ql/tmp/1608930740/15144587.10002 
           Reduce Output Operator
             key expressions:
                   expr: 0

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce1.q.out?rev=736181&r1=736180&r2=736181&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce1.q.out Tue Jan 20 16:57:48 2009
@@ -1,5 +1,5 @@
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (TOK_COLREF src key) (TOK_FUNCTION TOK_INT (/ (TOK_COLREF src key) 10)) (TOK_FUNCTION TOK_INT (% (TOK_COLREF src key) 10)) (TOK_COLREF src value)) '/bin/cat' (TOK_ALIASLIST tkey ten one tvalue)))) (TOK_DISTRIBUTEBY tvalue tkey) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC ten) (TOK_TABSORTCOLNAMEASC one))))
+  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (TOK_COLREF src key) (TOK_FUNCTION TOK_INT (/ (TOK_COLREF src key) 10)) (TOK_FUNCTION TOK_INT (% (TOK_COLREF src key) 10)) (TOK_COLREF src value)) '/bin/cat' (TOK_ALIASLIST tkey ten one tvalue)))) (TOK_DISTRIBUTEBY (TOK_COLREF tvalue) (TOK_COLREF tkey)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_COLREF ten)) (TOK_TABSORTCOLNAMEASC (TOK_COLREF one)))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce2.q.out?rev=736181&r1=736180&r2=736181&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce2.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce2.q.out Tue Jan 20 16:57:48 2009
@@ -1,5 +1,5 @@
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (TOK_COLREF src key) (TOK_FUNCTION TOK_INT (/ (TOK_COLREF src key) 10)) (TOK_FUNCTION TOK_INT (% (TOK_COLREF src key) 10)) (TOK_COLREF src value)) '/bin/cat' (TOK_ALIASLIST tkey ten one tvalue)))) (TOK_DISTRIBUTEBY tvalue tkey)))
+  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (TOK_COLREF src key) (TOK_FUNCTION TOK_INT (/ (TOK_COLREF src key) 10)) (TOK_FUNCTION TOK_INT (% (TOK_COLREF src key) 10)) (TOK_COLREF src value)) '/bin/cat' (TOK_ALIASLIST tkey ten one tvalue)))) (TOK_DISTRIBUTEBY (TOK_COLREF tvalue) (TOK_COLREF tkey))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce3.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce3.q.out?rev=736181&r1=736180&r2=736181&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce3.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce3.q.out Tue Jan 20 16:57:48 2009
@@ -1,5 +1,5 @@
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (TOK_COLREF src key) (TOK_FUNCTION TOK_INT (/ (TOK_COLREF src key) 10)) (TOK_FUNCTION TOK_INT (% (TOK_COLREF src key) 10)) (TOK_COLREF src value)) '/bin/cat' (TOK_ALIASLIST tkey ten one tvalue)))) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC tvalue) (TOK_TABSORTCOLNAMEASC tkey))))
+  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (TOK_COLREF src key) (TOK_FUNCTION TOK_INT (/ (TOK_COLREF src key) 10)) (TOK_FUNCTION TOK_INT (% (TOK_COLREF src key) 10)) (TOK_COLREF src value)) '/bin/cat' (TOK_ALIASLIST tkey ten one tvalue)))) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_COLREF tvalue)) (TOK_TABSORTCOLNAMEASC (TOK_COLREF tkey)))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce4.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce4.q.out?rev=736181&r1=736180&r2=736181&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce4.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce4.q.out Tue Jan 20 16:57:48 2009
@@ -1,5 +1,5 @@
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (TOK_COLREF src key) (TOK_FUNCTION TOK_INT (/ (TOK_COLREF src key) 10)) (TOK_FUNCTION TOK_INT (% (TOK_COLREF src key) 10)) (TOK_COLREF src value)) '/bin/cat' (TOK_ALIASLIST tkey ten one tvalue)))) (TOK_DISTRIBUTEBY tvalue tkey) (TOK_SORTBY (TOK_TABSORTCOLNAMEDESC ten) (TOK_TABSORTCOLNAMEASC one))))
+  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (TOK_COLREF src key) (TOK_FUNCTION TOK_INT (/ (TOK_COLREF src key) 10)) (TOK_FUNCTION TOK_INT (% (TOK_COLREF src key) 10)) (TOK_COLREF src value)) '/bin/cat' (TOK_ALIASLIST tkey ten one tvalue)))) (TOK_DISTRIBUTEBY (TOK_COLREF tvalue) (TOK_COLREF tkey)) (TOK_SORTBY (TOK_TABSORTCOLNAMEDESC (TOK_COLREF ten)) (TOK_TABSORTCOLNAMEASC (TOK_COLREF one)))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce5.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce5.q.out?rev=736181&r1=736180&r2=736181&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce5.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce5.q.out Tue Jan 20 16:57:48 2009
@@ -1,5 +1,5 @@
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src key) c1) (TOK_SELEXPR (TOK_FUNCTION TOK_INT (/ (TOK_COLREF src key) 10)) c2) (TOK_SELEXPR (TOK_FUNCTION TOK_INT (% (TOK_COLREF src key) 10)) c3) (TOK_SELEXPR (TOK_COLREF src value) c4)) (TOK_DISTRIBUTEBY c4 c1) (TOK_SORTBY (TOK_TABSORTCOLNAMEDESC c2) (TOK_TABSORTCOLNAMEASC c3))))
+  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src key) c1) (TOK_SELEXPR (TOK_FUNCTION TOK_INT (/ (TOK_COLREF src key) 10)) c2) (TOK_SELEXPR (TOK_FUNCTION TOK_INT (% (TOK_COLREF src key) 10)) c3) (TOK_SELEXPR (TOK_COLREF src value) c4)) (TOK_DISTRIBUTEBY (TOK_COLREF c4) (TOK_COLREF c1)) (TOK_SORTBY (TOK_TABSORTCOLNAMEDESC (TOK_COLREF c2)) (TOK_TABSORTCOLNAMEASC (TOK_COLREF c3)))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage