Posted to commits@hive.apache.org by zs...@apache.org on 2010/02/04 20:44:01 UTC

svn commit: r906619 - in /hadoop/hive/trunk: ./ ql/src/java/org/apache/hadoop/hive/ql/parse/ ql/src/test/queries/clientpositive/ ql/src/test/results/clientpositive/

Author: zshao
Date: Thu Feb  4 19:44:01 2010
New Revision: 906619

URL: http://svn.apache.org/viewvc?rev=906619&view=rev
Log:
HIVE-1124. Create view should expand the query text consistently. (John Sichi via zshao)

Modified:
    hadoop/hive/trunk/CHANGES.txt
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/UnparseTranslator.java
    hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_view.q
    hadoop/hive/trunk/ql/src/test/results/clientpositive/create_view.q.out

Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=906619&r1=906618&r2=906619&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Thu Feb  4 19:44:01 2010
@@ -130,6 +130,9 @@
     HIVE-1125. Correct console output message.
     (Paul Yang via namit)
 
+    HIVE-1124. Create view should expand the query text consistently.
+    (John Sichi via zshao)
+
 Release 0.5.0 -  Unreleased
 
   INCOMPATIBLE CHANGES

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java?rev=906619&r1=906618&r2=906619&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java Thu Feb  4 19:44:01 2010
@@ -39,6 +39,7 @@
   private final HashMap<String, LinkedHashMap<String, ColumnInfo>> rslvMap;
 
   private final HashMap<String, String[]> invRslvMap;
+  private final Map<String, ASTNode> expressionMap;
 
   // TODO: Refactor this and do in a more object oriented manner
   private boolean isExprResolver;
@@ -50,9 +51,39 @@
     rowSchema = new RowSchema();
     rslvMap = new HashMap<String, LinkedHashMap<String, ColumnInfo>>();
     invRslvMap = new HashMap<String, String[]>();
+    expressionMap = new HashMap<String, ASTNode>();
     isExprResolver = false;
   }
 
+  /**
+   * Puts a resolver entry corresponding to a source expression which is to be
+   * used for identical expression recognition (e.g. for matching expressions
+   * in the SELECT list with the GROUP BY clause).  The convention for such
+   * entries is an empty-string ("") as the table alias together with the
+   * string rendering of the ASTNode as the column alias.
+   */
+  public void putExpression(ASTNode node, ColumnInfo colInfo) {
+    String treeAsString = node.toStringTree();
+    expressionMap.put(treeAsString, node);
+    put("", treeAsString, colInfo);
+  }
+
+  /**
+   * Retrieves the ColumnInfo corresponding to a source expression which
+   * exactly matches the string rendering of the given ASTNode.
+   */
+  public ColumnInfo getExpression(ASTNode node) throws SemanticException {
+    return get("", node.toStringTree());
+  }
+
+  /**
+   * Retrieves the source expression matching a given ASTNode's
+   * string rendering exactly.
+   */
+  public ASTNode getExpressionSource(ASTNode node) {
+    return expressionMap.get(node.toStringTree());
+  }
+
   public void put(String tab_alias, String col_alias, ColumnInfo colInfo) {
     if (tab_alias != null) {
       tab_alias = tab_alias.toLowerCase();
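
A minimal usage sketch of the new expression map (not part of the patch): the two ASTNode parameters are assumed to come from the Hive parser for the same expression text, and TypeInfoFactory.stringTypeInfo is used only for illustration; the RowResolver and ColumnInfo calls are the ones added or referenced above.

import org.apache.hadoop.hive.ql.exec.ColumnInfo;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.RowResolver;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

class RowResolverExpressionSketch {
  // groupByKey and selectOccurrence are assumed to be ASTNodes produced by the
  // Hive parser for the same expression text, e.g. "upper(value)".
  static void example(ASTNode groupByKey, ASTNode selectOccurrence)
      throws SemanticException {
    RowResolver rr = new RowResolver();

    // Register the GROUP BY key under the ""/toStringTree() convention.
    rr.putExpression(groupByKey,
        new ColumnInfo("_col0", TypeInfoFactory.stringTypeInfo, "", false));

    // An identical expression in the SELECT list resolves to the same
    // precomputed column, because lookup is by the string rendering of the AST.
    ColumnInfo precomputed = rr.getExpression(selectOccurrence);

    // The original GROUP BY node is recoverable as well; this is what lets the
    // view unparser copy its rewritten text onto the SELECT-list occurrence.
    ASTNode source = rr.getExpressionSource(selectOccurrence);
  }
}

Because both nodes render to the same toStringTree() key, the SELECT-list occurrence maps back to the GROUP BY entry instead of being treated as a new expression.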

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=906619&r1=906618&r2=906619&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Thu Feb  4 19:44:01 2010
@@ -1980,8 +1980,7 @@
     List<ASTNode> grpByExprs = getGroupByForClause(parseInfo, dest);
     for (int i = 0; i < grpByExprs.size(); ++i) {
       ASTNode grpbyExpr = grpByExprs.get(i);
-      String text = grpbyExpr.toStringTree();
-      ColumnInfo exprInfo = groupByInputRowResolver.get("", text);
+      ColumnInfo exprInfo = groupByInputRowResolver.getExpression(grpbyExpr);
 
       if (exprInfo == null) {
         throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(grpbyExpr));
@@ -1991,7 +1990,7 @@
           .getInternalName(), "", false));
       String field = getColumnInternalName(i);
       outputColumnNames.add(field);
-      groupByOutputRowResolver.put("", grpbyExpr.toStringTree(),
+      groupByOutputRowResolver.putExpression(grpbyExpr,
           new ColumnInfo(field, exprInfo.getType(), null, false));
       colExprMap.put(field, groupByKeys.get(groupByKeys.size() - 1));
     }
@@ -2009,9 +2008,9 @@
       ArrayList<ExprNodeDesc> aggParameters = new ArrayList<ExprNodeDesc>();
       // 0 is the function name
       for (int i = 1; i < value.getChildCount(); i++) {
-        String text = value.getChild(i).toStringTree();
         ASTNode paraExpr = (ASTNode) value.getChild(i);
-        ColumnInfo paraExprInfo = groupByInputRowResolver.get("", text);
+        ColumnInfo paraExprInfo =
+          groupByInputRowResolver.getExpression(paraExpr);
         if (paraExprInfo == null) {
           throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(paraExpr));
         }
@@ -2036,7 +2035,7 @@
       String field = getColumnInternalName(groupByKeys.size()
           + aggregations.size() - 1);
       outputColumnNames.add(field);
-      groupByOutputRowResolver.put("", value.toStringTree(), new ColumnInfo(
+      groupByOutputRowResolver.putExpression(value, new ColumnInfo(
           field, udaf.returnType, "", false));
       // Save the evaluator so that it can be used by the next-stage
       // GroupByOperators
@@ -2082,8 +2081,7 @@
     Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
     for (int i = 0; i < grpByExprs.size(); ++i) {
       ASTNode grpbyExpr = grpByExprs.get(i);
-      String text = grpbyExpr.toStringTree();
-      ColumnInfo exprInfo = groupByInputRowResolver.get("", text);
+      ColumnInfo exprInfo = groupByInputRowResolver.getExpression(grpbyExpr);
 
       if (exprInfo == null) {
         throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(grpbyExpr));
@@ -2094,7 +2092,7 @@
           .getIsPartitionCol()));
       String field = getColumnInternalName(i);
       outputColumnNames.add(field);
-      groupByOutputRowResolver.put("", grpbyExpr.toStringTree(),
+      groupByOutputRowResolver.putExpression(grpbyExpr,
           new ColumnInfo(field, exprInfo.getType(), "", false));
       colExprMap.put(field, groupByKeys.get(groupByKeys.size() - 1));
     }
@@ -2120,9 +2118,9 @@
       if (!partialAggDone) {
         // 0 is the function name
         for (int i = 1; i < value.getChildCount(); i++) {
-          String text = value.getChild(i).toStringTree();
           ASTNode paraExpr = (ASTNode) value.getChild(i);
-          ColumnInfo paraExprInfo = groupByInputRowResolver.get("", text);
+          ColumnInfo paraExprInfo =
+            groupByInputRowResolver.getExpression(paraExpr);
           if (paraExprInfo == null) {
             throw new SemanticException(ErrorMsg.INVALID_COLUMN
                 .getMsg(paraExpr));
@@ -2135,8 +2133,7 @@
               paraExprInfo.getIsPartitionCol()));
         }
       } else {
-        String text = entry.getKey();
-        ColumnInfo paraExprInfo = groupByInputRowResolver.get("", text);
+        ColumnInfo paraExprInfo = groupByInputRowResolver.getExpression(value);
         if (paraExprInfo == null) {
           throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(value));
         }
@@ -2168,7 +2165,7 @@
       String field = getColumnInternalName(groupByKeys.size()
           + aggregations.size() - 1);
       outputColumnNames.add(field);
-      groupByOutputRowResolver.put("", value.toStringTree(), new ColumnInfo(
+      groupByOutputRowResolver.putExpression(value, new ColumnInfo(
           field, udaf.returnType, "", false));
     }
 
@@ -2218,7 +2215,7 @@
       groupByKeys.add(grpByExprNode);
       String field = getColumnInternalName(i);
       outputColumnNames.add(field);
-      groupByOutputRowResolver.put("", grpbyExpr.toStringTree(),
+      groupByOutputRowResolver.putExpression(grpbyExpr,
           new ColumnInfo(field, grpByExprNode.getTypeInfo(), "", false));
       colExprMap.put(field, groupByKeys.get(groupByKeys.size() - 1));
     }
@@ -2230,15 +2227,14 @@
       // 0 is function name
       for (int i = 1; i < value.getChildCount(); i++) {
         ASTNode parameter = (ASTNode) value.getChild(i);
-        String text = parameter.toStringTree();
-        if (groupByOutputRowResolver.get("", text) == null) {
+        if (groupByOutputRowResolver.getExpression(parameter) == null) {
           ExprNodeDesc distExprNode = genExprNodeDesc(parameter,
               groupByInputRowResolver);
           groupByKeys.add(distExprNode);
           numDistn++;
           String field = getColumnInternalName(grpByExprs.size() + numDistn - 1);
           outputColumnNames.add(field);
-          groupByOutputRowResolver.put("", text, new ColumnInfo(field,
+          groupByOutputRowResolver.putExpression(parameter, new ColumnInfo(field,
               distExprNode.getTypeInfo(), "", false));
           colExprMap.put(field, groupByKeys.get(groupByKeys.size() - 1));
         }
@@ -2278,7 +2274,7 @@
       String field = getColumnInternalName(groupByKeys.size()
           + aggregations.size() - 1);
       outputColumnNames.add(field);
-      groupByOutputRowResolver.put("", value.toStringTree(), new ColumnInfo(
+      groupByOutputRowResolver.putExpression(value, new ColumnInfo(
           field, udaf.returnType, "", false));
       // Save the evaluator so that it can be used by the next-stage
       // GroupByOperators
@@ -2330,14 +2326,13 @@
       ExprNodeDesc inputExpr = genExprNodeDesc(grpbyExpr,
           reduceSinkInputRowResolver);
       reduceKeys.add(inputExpr);
-      String text = grpbyExpr.toStringTree();
-      if (reduceSinkOutputRowResolver.get("", text) == null) {
+      if (reduceSinkOutputRowResolver.getExpression(grpbyExpr) == null) {
         outputColumnNames.add(getColumnInternalName(reduceKeys.size() - 1));
         String field = Utilities.ReduceField.KEY.toString() + "."
             + getColumnInternalName(reduceKeys.size() - 1);
         ColumnInfo colInfo = new ColumnInfo(field, reduceKeys.get(
             reduceKeys.size() - 1).getTypeInfo(), null, false);
-        reduceSinkOutputRowResolver.put("", text, colInfo);
+        reduceSinkOutputRowResolver.putExpression(grpbyExpr, colInfo);
         colExprMap.put(colInfo.getInternalName(), inputExpr);
       } else {
         throw new SemanticException(ErrorMsg.DUPLICATE_GROUPBY_KEY
@@ -2351,8 +2346,7 @@
       // 0 is function name
       for (int i = 1; i < value.getChildCount(); i++) {
         ASTNode parameter = (ASTNode) value.getChild(i);
-        String text = parameter.toStringTree();
-        if (reduceSinkOutputRowResolver.get("", text) == null) {
+        if (reduceSinkOutputRowResolver.getExpression(parameter) == null) {
           reduceKeys
               .add(genExprNodeDesc(parameter, reduceSinkInputRowResolver));
           outputColumnNames.add(getColumnInternalName(reduceKeys.size() - 1));
@@ -2360,7 +2354,7 @@
               + getColumnInternalName(reduceKeys.size() - 1);
           ColumnInfo colInfo = new ColumnInfo(field, reduceKeys.get(
               reduceKeys.size() - 1).getTypeInfo(), null, false);
-          reduceSinkOutputRowResolver.put("", text, colInfo);
+          reduceSinkOutputRowResolver.putExpression(parameter, colInfo);
           colExprMap.put(colInfo.getInternalName(), reduceKeys.get(reduceKeys
               .size() - 1));
         }
@@ -2378,15 +2372,14 @@
         // 0 is function name
         for (int i = 1; i < value.getChildCount(); i++) {
           ASTNode parameter = (ASTNode) value.getChild(i);
-          String text = parameter.toStringTree();
-          if (reduceSinkOutputRowResolver.get("", text) == null) {
+          if (reduceSinkOutputRowResolver.getExpression(parameter) == null) {
             reduceValues.add(genExprNodeDesc(parameter,
                 reduceSinkInputRowResolver));
             outputColumnNames
                 .add(getColumnInternalName(reduceValues.size() - 1));
             String field = Utilities.ReduceField.VALUE.toString() + "."
                 + getColumnInternalName(reduceValues.size() - 1);
-            reduceSinkOutputRowResolver.put("", text, new ColumnInfo(field,
+            reduceSinkOutputRowResolver.putExpression(parameter, new ColumnInfo(field,
                 reduceValues.get(reduceValues.size() - 1).getTypeInfo(), null,
                 false));
           }
@@ -2406,7 +2399,7 @@
         outputColumnNames.add(getColumnInternalName(reduceValues.size() - 1));
         String field = Utilities.ReduceField.VALUE.toString() + "."
             + getColumnInternalName(reduceValues.size() - 1);
-        reduceSinkOutputRowResolver.put("", (entry.getValue()).toStringTree(),
+        reduceSinkOutputRowResolver.putExpression(entry.getValue(),
             new ColumnInfo(field, type, null, false));
       }
     }
@@ -2453,14 +2446,14 @@
       ASTNode grpbyExpr = grpByExprs.get(i);
       String field = getColumnInternalName(i);
       outputColumnNames.add(field);
-      TypeInfo typeInfo = reduceSinkInputRowResolver2.get("",
-          grpbyExpr.toStringTree()).getType();
+      TypeInfo typeInfo = reduceSinkInputRowResolver2.getExpression(
+          grpbyExpr).getType();
       ExprNodeColumnDesc inputExpr = new ExprNodeColumnDesc(typeInfo, field,
           "", false);
       reduceKeys.add(inputExpr);
       ColumnInfo colInfo = new ColumnInfo(Utilities.ReduceField.KEY.toString()
           + "." + field, typeInfo, "", false);
-      reduceSinkOutputRowResolver2.put("", grpbyExpr.toStringTree(), colInfo);
+      reduceSinkOutputRowResolver2.putExpression(grpbyExpr, colInfo);
       colExprMap.put(colInfo.getInternalName(), inputExpr);
     }
     // Get partial aggregation results and store in reduceValues
@@ -2471,13 +2464,13 @@
     for (Map.Entry<String, ASTNode> entry : aggregationTrees.entrySet()) {
       String field = getColumnInternalName(inputField);
       ASTNode t = entry.getValue();
-      TypeInfo typeInfo = reduceSinkInputRowResolver2.get("", t.toStringTree())
+      TypeInfo typeInfo = reduceSinkInputRowResolver2.getExpression(t)
           .getType();
       reduceValues.add(new ExprNodeColumnDesc(typeInfo, field, "", false));
       inputField++;
       String col = getColumnInternalName(reduceValues.size() - 1);
       outputColumnNames.add(col);
-      reduceSinkOutputRowResolver2.put("", t.toStringTree(), new ColumnInfo(
+      reduceSinkOutputRowResolver2.putExpression(t, new ColumnInfo(
           Utilities.ReduceField.VALUE.toString() + "." + col, typeInfo, "",
           false));
     }
@@ -2522,8 +2515,7 @@
     ArrayList<String> outputColumnNames = new ArrayList<String>();
     for (int i = 0; i < grpByExprs.size(); ++i) {
       ASTNode grpbyExpr = grpByExprs.get(i);
-      String text = grpbyExpr.toStringTree();
-      ColumnInfo exprInfo = groupByInputRowResolver2.get("", text);
+      ColumnInfo exprInfo = groupByInputRowResolver2.getExpression(grpbyExpr);
       if (exprInfo == null) {
         throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(grpbyExpr));
       }
@@ -2533,7 +2525,7 @@
           exprInfo.getTabAlias(), exprInfo.getIsPartitionCol()));
       String field = getColumnInternalName(i);
       outputColumnNames.add(field);
-      groupByOutputRowResolver2.put("", grpbyExpr.toStringTree(),
+      groupByOutputRowResolver2.putExpression(grpbyExpr,
           new ColumnInfo(field, exprInfo.getType(), "", false));
       colExprMap.put(field, groupByKeys.get(groupByKeys.size() - 1));
     }
@@ -2542,8 +2534,7 @@
     for (Map.Entry<String, ASTNode> entry : aggregationTrees.entrySet()) {
       ArrayList<ExprNodeDesc> aggParameters = new ArrayList<ExprNodeDesc>();
       ASTNode value = entry.getValue();
-      String text = entry.getKey();
-      ColumnInfo paraExprInfo = groupByInputRowResolver2.get("", text);
+      ColumnInfo paraExprInfo = groupByInputRowResolver2.getExpression(value);
       if (paraExprInfo == null) {
         throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(value));
       }
@@ -2572,7 +2563,7 @@
       String field = getColumnInternalName(groupByKeys.size()
           + aggregations.size() - 1);
       outputColumnNames.add(field);
-      groupByOutputRowResolver2.put("", value.toStringTree(), new ColumnInfo(
+      groupByOutputRowResolver2.putExpression(value, new ColumnInfo(
           field, udaf.returnType, "", false));
     }
 
@@ -3769,7 +3760,7 @@
       outputColumnNames.add(field);
       ColumnInfo colInfo2 = new ColumnInfo(field, grpByExprNode.getTypeInfo(),
           "", false);
-      groupByOutputRowResolver.put("", colName.toStringTree(), colInfo2);
+      groupByOutputRowResolver.putExpression(colName, colInfo2);
 
       // establish mapping from the output column to the input column
       colExprMap.put(field, grpByExprNode);
@@ -4411,14 +4402,13 @@
     for (ASTNode distn : distExprs) {
       ExprNodeDesc distExpr = genExprNodeDesc(distn, inputRR);
       reduceKeys.add(distExpr);
-      String text = distn.toStringTree();
-      if (reduceSinkOutputRowResolver.get("", text) == null) {
+      if (reduceSinkOutputRowResolver.getExpression(distn) == null) {
         outputColumnNames.add(getColumnInternalName(reduceKeys.size() - 1));
         String field = Utilities.ReduceField.KEY.toString() + "."
             + getColumnInternalName(reduceKeys.size() - 1);
         ColumnInfo colInfo = new ColumnInfo(field, reduceKeys.get(
             reduceKeys.size() - 1).getTypeInfo(), "", false);
-        reduceSinkOutputRowResolver.put("", text, colInfo);
+        reduceSinkOutputRowResolver.putExpression(distn, colInfo);
         colExprMap.put(colInfo.getInternalName(), distExpr);
       }
     }
@@ -4429,16 +4419,15 @@
       List<ASTNode> grpByExprs = getGroupByForClause(qbp, dest);
       for (int i = 0; i < grpByExprs.size(); ++i) {
         ASTNode grpbyExpr = grpByExprs.get(i);
-        String text = grpbyExpr.toStringTree();
 
-        if (reduceSinkOutputRowResolver.get("", text) == null) {
+        if (reduceSinkOutputRowResolver.getExpression(grpbyExpr) == null) {
           ExprNodeDesc grpByExprNode = genExprNodeDesc(grpbyExpr, inputRR);
           reduceValues.add(grpByExprNode);
           String field = Utilities.ReduceField.VALUE.toString() + "."
               + getColumnInternalName(reduceValues.size() - 1);
           ColumnInfo colInfo = new ColumnInfo(field, reduceValues.get(
               reduceValues.size() - 1).getTypeInfo(), "", false);
-          reduceSinkOutputRowResolver.put("", text, colInfo);
+          reduceSinkOutputRowResolver.putExpression(grpbyExpr, colInfo);
           outputColumnNames.add(getColumnInternalName(reduceValues.size() - 1));
         }
       }
@@ -4455,16 +4444,15 @@
         // 0 is the function name
         for (int i = 1; i < value.getChildCount(); i++) {
           ASTNode paraExpr = (ASTNode) value.getChild(i);
-          String text = paraExpr.toStringTree();
 
-          if (reduceSinkOutputRowResolver.get("", text) == null) {
+          if (reduceSinkOutputRowResolver.getExpression(paraExpr) == null) {
             ExprNodeDesc paraExprNode = genExprNodeDesc(paraExpr, inputRR);
             reduceValues.add(paraExprNode);
             String field = Utilities.ReduceField.VALUE.toString() + "."
                 + getColumnInternalName(reduceValues.size() - 1);
             ColumnInfo colInfo = new ColumnInfo(field, reduceValues.get(
                 reduceValues.size() - 1).getTypeInfo(), "", false);
-            reduceSinkOutputRowResolver.put("", text, colInfo);
+            reduceSinkOutputRowResolver.putExpression(paraExpr, colInfo);
             outputColumnNames
                 .add(getColumnInternalName(reduceValues.size() - 1));
           }
@@ -5652,7 +5640,7 @@
     // Now expand the view definition with extras such as explicit column
     // references; this expanded form is what we'll re-parse when the view is
     // referenced later.
-    unparseTranslator.applyTranslation(ctx.getTokenRewriteStream());
+    unparseTranslator.applyTranslations(ctx.getTokenRewriteStream());
     String expandedText = ctx.getTokenRewriteStream().toString(
         viewSelect.getTokenStartIndex(), viewSelect.getTokenStopIndex());
 
@@ -5722,8 +5710,12 @@
     // build the exprNodeFuncDesc with recursively built children.
 
     // If the current subExpression is pre-calculated, as in Group-By etc.
-    ColumnInfo colInfo = input.get("", expr.toStringTree());
+    ColumnInfo colInfo = input.getExpression(expr);
     if (colInfo != null) {
+      ASTNode source = input.getExpressionSource(expr);
+      if (source != null) {
+        unparseTranslator.addCopyTranslation(expr, source);
+      }
       return new ExprNodeColumnDesc(colInfo.getType(), colInfo
           .getInternalName(), colInfo.getTabAlias(), colInfo
           .getIsPartitionCol());

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java?rev=906619&r1=906618&r2=906619&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java Thu Feb  4 19:44:01 2010
@@ -93,11 +93,15 @@
     ExprNodeDesc desc = null;
 
     // If the current subExpression is pre-calculated, as in Group-By etc.
-    ColumnInfo colInfo = input.get("", expr.toStringTree());
+    ColumnInfo colInfo = input.getExpression(expr);
     if (colInfo != null) {
       desc = new ExprNodeColumnDesc(colInfo.getType(), colInfo
           .getInternalName(), colInfo.getTabAlias(), colInfo
           .getIsPartitionCol());
+      ASTNode source = input.getExpressionSource(expr);
+      if (source != null) {
+        ctx.getUnparseTranslator().addCopyTranslation(expr, source);
+      }
       return desc;
     }
     return desc;

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/UnparseTranslator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/UnparseTranslator.java?rev=906619&r1=906618&r2=906619&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/UnparseTranslator.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/UnparseTranslator.java Thu Feb  4 19:44:01 2010
@@ -18,7 +18,9 @@
 
 package org.apache.hadoop.hive.ql.parse;
 
+import java.util.ArrayList;
 import java.util.Map;
+import java.util.List;
 import java.util.NavigableMap;
 import java.util.TreeMap;
 
@@ -32,10 +34,12 @@
 class UnparseTranslator {
   // key is token start index
   private final NavigableMap<Integer, Translation> translations;
+  private final List<CopyTranslation> copyTranslations;
   private boolean enabled;
 
   public UnparseTranslator() {
     translations = new TreeMap<Integer, Translation>();
+    copyTranslations = new ArrayList<CopyTranslation>();
   }
 
   /**
@@ -54,9 +58,12 @@
 
   /**
    * Register a translation to be performed as part of unparse.
+   * The translation must not overlap with any previously
+   * registered translations (unless it is identical to an
+   * existing translation, in which case it is ignored).
    * 
    * @param node
-   *          source node whose subtree is to be replaced
+   *          target node whose subtree is to be replaced
    * 
    * @param replacementText
    *          text to use as replacement
@@ -85,7 +92,7 @@
     Map.Entry<Integer, Translation> existingEntry;
     existingEntry = translations.floorEntry(tokenStartIndex);
     if (existingEntry != null) {
-      if (existingEntry.getKey() == tokenStartIndex) {
+      if (existingEntry.getKey().equals(tokenStartIndex)) {
         if (existingEntry.getValue().tokenStopIndex == tokenStopIndex) {
           if (existingEntry.getValue().replacementText.equals(replacementText)) {
             // exact match for existing mapping: somebody is doing something
@@ -123,15 +130,64 @@
   }
 
   /**
-   * Apply translations on the given token stream.
+   * Register a "copy" translation in which a node will be translated into
+   * whatever the translation turns out to be for another node (after
+   * previously registered translations have already been performed).  Deferred
+   * translations are performed in the order they are registered, and follow
+   * the same rules regarding overlap as non-copy translations.
+   *
+   * @param targetNode node whose subtree is to be replaced
+   *
+   * @param sourceNode the node providing the replacement text
+   *
+   */
+  void addCopyTranslation(ASTNode targetNode, ASTNode sourceNode) {
+    if (!enabled) {
+      return;
+    }
+
+    if (targetNode.getOrigin() != null) {
+      return;
+    }
+
+    CopyTranslation copyTranslation = new CopyTranslation();
+    copyTranslation.targetNode = targetNode;
+    copyTranslation.sourceNode = sourceNode;
+    copyTranslations.add(copyTranslation);
+  }
+
+  /**
+   * Apply all translations on the given token stream.
    * 
    * @param tokenRewriteStream
    *          rewrite-capable stream
    */
-  void applyTranslation(TokenRewriteStream tokenRewriteStream) {
+  void applyTranslations(TokenRewriteStream tokenRewriteStream) {
     for (Map.Entry<Integer, Translation> entry : translations.entrySet()) {
-      tokenRewriteStream.replace(entry.getKey(),
-          entry.getValue().tokenStopIndex, entry.getValue().replacementText);
+      tokenRewriteStream.replace(
+        entry.getKey(),
+        entry.getValue().tokenStopIndex,
+        entry.getValue().replacementText);
+    }
+    for (CopyTranslation copyTranslation : copyTranslations) {
+      String replacementText = tokenRewriteStream.toString(
+        copyTranslation.sourceNode.getTokenStartIndex(),
+        copyTranslation.sourceNode.getTokenStopIndex());
+      String currentText = tokenRewriteStream.toString(
+        copyTranslation.targetNode.getTokenStartIndex(),
+        copyTranslation.targetNode.getTokenStopIndex());
+      if (currentText.equals(replacementText)) {
+        // copy is a nop, so skip it--this is important for avoiding
+        // spurious overlap assertions
+        continue;
+      }
+      // Call addTranslation just to get the assertions for overlap
+      // checking.
+      addTranslation(copyTranslation.targetNode, replacementText);
+      tokenRewriteStream.replace(
+        copyTranslation.targetNode.getTokenStartIndex(),
+        copyTranslation.targetNode.getTokenStopIndex(),
+        replacementText);
     }
   }
 
@@ -144,4 +200,9 @@
       return "" + tokenStopIndex + " -> " + replacementText;
     }
   }
+
+  private static class CopyTranslation {
+    ASTNode targetNode;
+    ASTNode sourceNode;
+  }
 }
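
The copy-translation mechanics can be pictured with a simplified, self-contained model (not the Hive class itself): direct replacements are applied first, then each registered copy target takes whatever text its source region carries afterwards, skipping copies that would be no-ops. The token positions and replacement strings below are invented for illustration, and replacements are per-token rather than per token range.

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.TreeMap;

class CopyTranslationSketch {
  public static void main(String[] args) {
    String[] tokens = {"SELECT", "key", "FROM", "src", "GROUP", "BY", "key"};

    // Direct translations: token index -> replacement text.
    Map<Integer, String> direct = new TreeMap<>();
    direct.put(6, "`src`.`key`");          // qualify the GROUP BY key

    // Copy translations: target token index -> source token index,
    // kept in registration order.
    Map<Integer, Integer> copies = new LinkedHashMap<>();
    copies.put(1, 6);                       // SELECT-list key copies the GROUP BY text

    String[] out = tokens.clone();
    direct.forEach((i, text) -> out[i] = text);
    copies.forEach((target, source) -> {
      if (!out[target].equals(out[source])) {  // skip no-op copies
        out[target] = out[source];
      }
    });

    System.out.println(String.join(" ", out));
    // -> SELECT `src`.`key` FROM src GROUP BY `src`.`key`
  }
}

In the real applyTranslations above, the copy pass additionally re-registers the copied text through addTranslation so the usual overlap assertions still run before the token stream is rewritten.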

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_view.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_view.q?rev=906619&r1=906618&r2=906619&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_view.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_view.q Thu Feb  4 19:44:01 2010
@@ -12,6 +12,8 @@
 DROP VIEW view12;
 DROP VIEW view13;
 DROP VIEW view14;
+DROP VIEW view15;
+DROP VIEW view16;
 DROP TEMPORARY FUNCTION test_translate;
 DROP TEMPORARY FUNCTION test_max;
 DROP TEMPORARY FUNCTION test_explode;
@@ -164,6 +166,25 @@
 SELECT * FROM view14
 ORDER BY k1;
 
+-- test usage of GROUP BY within view
+CREATE VIEW view15 AS
+SELECT key,COUNT(value) AS value_count
+FROM src
+GROUP BY key;
+DESCRIBE EXTENDED view15;
+SELECT * FROM view15
+ORDER BY value_count DESC, key
+LIMIT 10;
+
+-- test usage of DISTINCT within view
+CREATE VIEW view16 AS
+SELECT DISTINCT value
+FROM src;
+DESCRIBE EXTENDED view16;
+SELECT * FROM view16
+ORDER BY value
+LIMIT 10;
+
 -- this should work since currently we don't track view->table
 -- dependencies for implementing RESTRICT
 DROP TABLE table1;
@@ -182,6 +203,8 @@
 DROP VIEW view12;
 DROP VIEW view13;
 DROP VIEW view14;
+DROP VIEW view15;
+DROP VIEW view16;
 DROP TEMPORARY FUNCTION test_translate;
 DROP TEMPORARY FUNCTION test_max;
 DROP TEMPORARY FUNCTION test_explode;

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/create_view.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/create_view.q.out?rev=906619&r1=906618&r2=906619&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/create_view.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/create_view.q.out Thu Feb  4 19:44:01 2010
@@ -54,6 +54,14 @@
 PREHOOK: type: DROPVIEW
 POSTHOOK: query: DROP VIEW view14
 POSTHOOK: type: DROPVIEW
+PREHOOK: query: DROP VIEW view15
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: DROP VIEW view15
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: DROP VIEW view16
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: DROP VIEW view16
+POSTHOOK: type: DROPVIEW
 PREHOOK: query: DROP TEMPORARY FUNCTION test_translate
 PREHOOK: type: DROPFUNCTION
 POSTHOOK: query: DROP TEMPORARY FUNCTION test_translate
@@ -73,60 +81,60 @@
 PREHOOK: query: SELECT * FROM src WHERE key=86
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/1600701811/10000
+PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-57-53_297_7782573084639735106/10000
 POSTHOOK: query: SELECT * FROM src WHERE key=86
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/1600701811/10000
+POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-57-53_297_7782573084639735106/10000
 86	val_86
 PREHOOK: query: CREATE VIEW view1 AS SELECT value FROM src WHERE key=86
 PREHOOK: type: CREATEVIEW
-PREHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/1774941594/10000
+PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-57-57_101_1140006614243920801/10000
 POSTHOOK: query: CREATE VIEW view1 AS SELECT value FROM src WHERE key=86
 POSTHOOK: type: CREATEVIEW
-POSTHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/1774941594/10000
+POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-57-57_101_1140006614243920801/10000
 POSTHOOK: Output: default@view1
 PREHOOK: query: CREATE VIEW view2 AS SELECT * FROM src
 PREHOOK: type: CREATEVIEW
-PREHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/557479374/10000
+PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-57-57_277_6299232103234711745/10000
 POSTHOOK: query: CREATE VIEW view2 AS SELECT * FROM src
 POSTHOOK: type: CREATEVIEW
-POSTHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/557479374/10000
+POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-57-57_277_6299232103234711745/10000
 POSTHOOK: Output: default@view2
 PREHOOK: query: CREATE VIEW view3(valoo) AS SELECT upper(value) FROM src WHERE key=86
 PREHOOK: type: CREATEVIEW
-PREHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/543421203/10000
+PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-57-57_434_4725140610794013500/10000
 POSTHOOK: query: CREATE VIEW view3(valoo) AS SELECT upper(value) FROM src WHERE key=86
 POSTHOOK: type: CREATEVIEW
-POSTHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/543421203/10000
+POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-57-57_434_4725140610794013500/10000
 POSTHOOK: Output: default@view3
 PREHOOK: query: SELECT * from view1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/728576887/10000
+PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-57-57_481_8837322274426963555/10000
 POSTHOOK: query: SELECT * from view1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/728576887/10000
+POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-57-57_481_8837322274426963555/10000
 val_86
 PREHOOK: query: SELECT * from view2 where key=18
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/850967798/10000
+PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-58-00_909_533868798679407560/10000
 POSTHOOK: query: SELECT * from view2 where key=18
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/850967798/10000
+POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-58-00_909_533868798679407560/10000
 18	val_18
 18	val_18
 PREHOOK: query: SELECT * from view3
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/1364208710/10000
+PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-58-04_450_191008179753881262/10000
 POSTHOOK: query: SELECT * from view3
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/1364208710/10000
+POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-58-04_450_191008179753881262/10000
 VAL_86
 PREHOOK: query: -- test EXPLAIN output for CREATE VIEW
 EXPLAIN
@@ -227,7 +235,7 @@
 POSTHOOK: type: DESCTABLE
 value	string	
 	 	 
-Detailed Table Information	Table(tableName:view1, dbName:default, owner:jsichi, createTime:1264709888, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:value, type:string, comment:null)], location:null, inputFormat:null, outputFormat:null, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1264709888}, viewOriginalText:SELECT value FROM src WHERE key=86, viewExpandedText:SELECT `src`.`value` FROM `src` WHERE `src`.`key`=86, tableType:VIRTUAL_VIEW)	
+Detailed Table Information	Table(tableName:view1, dbName:default, owner:jsichi, createTime:1265309877, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:value, type:string, comment:null)], location:null, inputFormat:null, outputFormat:null, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1265309877}, viewOriginalText:SELECT value FROM src WHERE key=86, viewExpandedText:SELECT `src`.`value` FROM `src` WHERE `src`.`key`=86, tableType:VIRTUAL_VIEW)	
 PREHOOK: query: DESCRIBE view2
 PREHOOK: type: DESCTABLE
 POSTHOOK: query: DESCRIBE view2
@@ -241,7 +249,7 @@
 key	string	
 value	string	
 	 	 
-Detailed Table Information	Table(tableName:view2, dbName:default, owner:jsichi, createTime:1264709888, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null)], location:null, inputFormat:null, outputFormat:null, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1264709888}, viewOriginalText:SELECT * FROM src, viewExpandedText:SELECT `src`.`key`, `src`.`value` FROM `src`, tableType:VIRTUAL_VIEW)	
+Detailed Table Information	Table(tableName:view2, dbName:default, owner:jsichi, createTime:1265309877, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null)], location:null, inputFormat:null, outputFormat:null, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1265309877}, viewOriginalText:SELECT * FROM src, viewExpandedText:SELECT `src`.`key`, `src`.`value` FROM `src`, tableType:VIRTUAL_VIEW)	
 PREHOOK: query: DESCRIBE view3
 PREHOOK: type: DESCTABLE
 POSTHOOK: query: DESCRIBE view3
@@ -253,7 +261,7 @@
 POSTHOOK: type: DESCTABLE
 valoo	string	
 	 	 
-Detailed Table Information	Table(tableName:view3, dbName:default, owner:jsichi, createTime:1264709888, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:valoo, type:string, comment:null)], location:null, inputFormat:null, outputFormat:null, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1264709888}, viewOriginalText:SELECT upper(value) FROM src WHERE key=86, viewExpandedText:SELECT `_c0` AS `valoo` FROM (SELECT upper(`src`.`value`) FROM `src` WHERE `src`.`key`=86) `view3`, tableType:VIRTUAL_VIEW)	
+Detailed Table Information	Table(tableName:view3, dbName:default, owner:jsichi, createTime:1265309877, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:valoo, type:string, comment:null)], location:null, inputFormat:null, outputFormat:null, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1265309877}, viewOriginalText:SELECT upper(value) FROM src WHERE key=86, viewExpandedText:SELECT `_c0` AS `valoo` FROM (SELECT upper(`src`.`value`) FROM `src` WHERE `src`.`key`=86) `view3`, tableType:VIRTUAL_VIEW)	
 PREHOOK: query: CREATE TABLE table1 (key int)
 PREHOOK: type: CREATETABLE
 POSTHOOK: query: CREATE TABLE table1 (key int)
@@ -269,7 +277,7 @@
 POSTHOOK: type: DESCTABLE
 key	int	
 	 	 
-Detailed Table Information	Table(tableName:table1, dbName:default, owner:jsichi, createTime:1264709897, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:int, comment:null)], location:file:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/table1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1264709897}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
+Detailed Table Information	Table(tableName:table1, dbName:default, owner:jsichi, createTime:1265309888, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:int, comment:null)], location:file:/Users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/table1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1265309888}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
 PREHOOK: query: DESCRIBE EXTENDED src1
 PREHOOK: type: DESCTABLE
 POSTHOOK: query: DESCRIBE EXTENDED src1
@@ -277,7 +285,7 @@
 key	string	default
 value	string	default
 	 	 
-Detailed Table Information	Table(tableName:src1, dbName:default, owner:null, createTime:1264709884, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default)], location:file:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1264709884}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
+Detailed Table Information	Table(tableName:src1, dbName:default, owner:null, createTime:1265309872, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:default), FieldSchema(name:value, type:string, comment:default)], location:file:/Users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/src1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1265309872}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
 PREHOOK: query: -- use DESCRIBE EXTENDED on a base table as a point of comparison for
 -- view descriptions
 DESCRIBE EXTENDED table1
@@ -288,7 +296,7 @@
 POSTHOOK: type: DESCTABLE
 key	int	
 	 	 
-Detailed Table Information	Table(tableName:table1, dbName:default, owner:jsichi, createTime:1264709897, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:int, comment:null)], location:file:/data/users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/table1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1264709897}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
+Detailed Table Information	Table(tableName:table1, dbName:default, owner:jsichi, createTime:1265309888, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:int, comment:null)], location:file:/Users/jsichi/open/hive-trunk/build/ql/test/data/warehouse/table1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1265309888}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
 PREHOOK: query: INSERT OVERWRITE TABLE table1 SELECT key FROM src WHERE key = 86
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
@@ -300,27 +308,27 @@
 PREHOOK: query: SELECT * FROM table1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@table1
-PREHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/2141444732/10000
+PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-58-12_803_1284572417109920240/10000
 POSTHOOK: query: SELECT * FROM table1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@table1
-POSTHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/2141444732/10000
+POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-58-12_803_1284572417109920240/10000
 86
 PREHOOK: query: CREATE VIEW view4 AS SELECT * FROM table1
 PREHOOK: type: CREATEVIEW
-PREHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/898468153/10000
+PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-58-12_845_6779990684786101439/10000
 POSTHOOK: query: CREATE VIEW view4 AS SELECT * FROM table1
 POSTHOOK: type: CREATEVIEW
-POSTHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/898468153/10000
+POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-58-12_845_6779990684786101439/10000
 POSTHOOK: Output: default@view4
 PREHOOK: query: SELECT * FROM view4
 PREHOOK: type: QUERY
 PREHOOK: Input: default@table1
-PREHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/528396960/10000
+PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-58-12_884_2398988658087884662/10000
 POSTHOOK: query: SELECT * FROM view4
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@table1
-POSTHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/528396960/10000
+POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-58-12_884_2398988658087884662/10000
 86
 PREHOOK: query: DESCRIBE view4
 PREHOOK: type: DESCTABLE
@@ -336,20 +344,20 @@
 PREHOOK: query: SELECT * FROM table1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@table1
-PREHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/987067062/10000
+PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-58-18_331_1242169140808223548/10000
 POSTHOOK: query: SELECT * FROM table1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@table1
-POSTHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/987067062/10000
+POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-58-18_331_1242169140808223548/10000
 86	NULL
 PREHOOK: query: SELECT * FROM view4
 PREHOOK: type: QUERY
 PREHOOK: Input: default@table1
-PREHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/1541942127/10000
+PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-58-18_365_7928861520958340144/10000
 POSTHOOK: query: SELECT * FROM view4
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@table1
-POSTHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/1541942127/10000
+POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-58-18_365_7928861520958340144/10000
 86
 PREHOOK: query: DESCRIBE table1
 PREHOOK: type: DESCTABLE
@@ -365,20 +373,20 @@
 PREHOOK: query: CREATE VIEW view5 AS SELECT v1.key as key1, v2.key as key2
 FROM view4 v1 join view4 v2
 PREHOOK: type: CREATEVIEW
-PREHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/409117872/10000
+PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-58-24_000_4695504796079686794/10000
 POSTHOOK: query: CREATE VIEW view5 AS SELECT v1.key as key1, v2.key as key2
 FROM view4 v1 join view4 v2
 POSTHOOK: type: CREATEVIEW
-POSTHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/409117872/10000
+POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-58-24_000_4695504796079686794/10000
 POSTHOOK: Output: default@view5
 PREHOOK: query: SELECT * FROM view5
 PREHOOK: type: QUERY
 PREHOOK: Input: default@table1
-PREHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/1552433053/10000
+PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-58-24_296_7942853463626878015/10000
 POSTHOOK: query: SELECT * FROM view5
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@table1
-POSTHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/1552433053/10000
+POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-58-24_296_7942853463626878015/10000
 86	86
 PREHOOK: query: DESCRIBE view5
 PREHOOK: type: DESCTABLE
@@ -391,13 +399,13 @@
 CREATE VIEW view6(valoo COMMENT 'I cannot spell') AS
 SELECT upper(value) as blarg FROM src WHERE key=86
 PREHOOK: type: CREATEVIEW
-PREHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/620020128/10000
+PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-58-31_192_6828670885580433331/10000
 POSTHOOK: query: -- verify that column name and comment in DDL portion
 -- overrides column alias in SELECT
 CREATE VIEW view6(valoo COMMENT 'I cannot spell') AS
 SELECT upper(value) as blarg FROM src WHERE key=86
 POSTHOOK: type: CREATEVIEW
-POSTHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/620020128/10000
+POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-58-31_192_6828670885580433331/10000
 POSTHOOK: Output: default@view6
 PREHOOK: query: DESCRIBE view6
 PREHOOK: type: DESCTABLE
@@ -411,7 +419,7 @@
 ORDER BY key, value
 LIMIT 10
 PREHOOK: type: CREATEVIEW
-PREHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/339578208/10000
+PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-58-31_286_6005363713007584990/10000
 POSTHOOK: query: -- verify that ORDER BY and LIMIT are both supported in view def
 CREATE VIEW view7 AS
 SELECT * FROM src
@@ -419,16 +427,16 @@
 ORDER BY key, value
 LIMIT 10
 POSTHOOK: type: CREATEVIEW
-POSTHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/339578208/10000
+POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-58-31_286_6005363713007584990/10000
 POSTHOOK: Output: default@view7
 PREHOOK: query: SELECT * FROM view7
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/242830582/10000
+PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-58-31_335_579300007330547821/10000
 POSTHOOK: query: SELECT * FROM view7
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/242830582/10000
+POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-58-31_335_579300007330547821/10000
 82	val_82
 83	val_83
 83	val_83
@@ -445,14 +453,14 @@
 SELECT * FROM view7 ORDER BY key DESC, value
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/1636014325/10000
+PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-58-37_066_9213562375794733620/10000
 POSTHOOK: query: -- top-level ORDER BY should override the one inside the view
 -- (however, the inside ORDER BY should still influence the evaluation
 -- of the limit)
 SELECT * FROM view7 ORDER BY key DESC, value
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/1636014325/10000
+POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-58-37_066_9213562375794733620/10000
 90	val_90
 90	val_90
 87	val_87
@@ -467,12 +475,12 @@
 SELECT * FROM view7 LIMIT 5
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/130847292/10000
+PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-58-48_529_5840920753828411593/10000
 POSTHOOK: query: -- top-level LIMIT should override if lower
 SELECT * FROM view7 LIMIT 5
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/130847292/10000
+POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-58-48_529_5840920753828411593/10000
 82	val_82
 83	val_83
 83	val_83
@@ -482,12 +490,12 @@
 SELECT * FROM view7 LIMIT 20
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/1942970084/10000
+PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-58-55_996_55238509253219398/10000
 POSTHOOK: query: -- but not if higher
 SELECT * FROM view7 LIMIT 20
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/1942970084/10000
+POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-58-55_996_55238509253219398/10000
 82	val_82
 83	val_83
 83	val_83
@@ -510,12 +518,12 @@
 SELECT test_translate('abc', 'a', 'b')
 FROM table1
 PREHOOK: type: CREATEVIEW
-PREHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/2017981069/10000
+PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-59-01_575_7723116161754412999/10000
 POSTHOOK: query: CREATE VIEW view8(c) AS
 SELECT test_translate('abc', 'a', 'b')
 FROM table1
 POSTHOOK: type: CREATEVIEW
-POSTHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/2017981069/10000
+POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-59-01_575_7723116161754412999/10000
 POSTHOOK: Output: default@view8
 PREHOOK: query: DESCRIBE EXTENDED view8
 PREHOOK: type: DESCTABLE
@@ -523,17 +531,17 @@
 POSTHOOK: type: DESCTABLE
 c	string	
 	 	 
-Detailed Table Information	Table(tableName:view8, dbName:default, owner:jsichi, createTime:1264709925, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:c, type:string, comment:null)], location:null, inputFormat:null, outputFormat:null, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1264709925}, viewOriginalText:SELECT test_translate('abc', 'a', 'b')	 
+Detailed Table Information	Table(tableName:view8, dbName:default, owner:jsichi, createTime:1265309941, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:c, type:string, comment:null)], location:null, inputFormat:null, outputFormat:null, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1265309941}, viewOriginalText:SELECT test_translate('abc', 'a', 'b')	 
 FROM table1, viewExpandedText:SELECT `_c0` AS `c` FROM (SELECT `test_translate`('abc', 'a', 'b')	 	 
 FROM `table1`) `view8`, tableType:VIRTUAL_VIEW)		 
 PREHOOK: query: SELECT * FROM view8
 PREHOOK: type: QUERY
 PREHOOK: Input: default@table1
-PREHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/1776598981/10000
+PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-59-01_950_5622952838826584437/10000
 POSTHOOK: query: SELECT * FROM view8
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@table1
-POSTHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/1776598981/10000
+POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-59-01_950_5622952838826584437/10000
 bbc
 PREHOOK: query: -- test usage of a UDAF within a view
 CREATE TEMPORARY FUNCTION test_max AS
@@ -547,12 +555,12 @@
 SELECT test_max(length(value))
 FROM src
 PREHOOK: type: CREATEVIEW
-PREHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/1642348691/10000
+PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-59-07_044_5791517349444492101/10000
 POSTHOOK: query: CREATE VIEW view9(m) AS
 SELECT test_max(length(value))
 FROM src
 POSTHOOK: type: CREATEVIEW
-POSTHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/1642348691/10000
+POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-59-07_044_5791517349444492101/10000
 POSTHOOK: Output: default@view9
 PREHOOK: query: DESCRIBE EXTENDED view9
 PREHOOK: type: DESCTABLE
@@ -560,28 +568,28 @@
 POSTHOOK: type: DESCTABLE
 m	int	
 	 	 
-Detailed Table Information	Table(tableName:view9, dbName:default, owner:jsichi, createTime:1264709928, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:m, type:int, comment:null)], location:null, inputFormat:null, outputFormat:null, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1264709928}, viewOriginalText:SELECT test_max(length(value))	 
+Detailed Table Information	Table(tableName:view9, dbName:default, owner:jsichi, createTime:1265309947, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:m, type:int, comment:null)], location:null, inputFormat:null, outputFormat:null, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1265309947}, viewOriginalText:SELECT test_max(length(value))	 
 FROM src, viewExpandedText:SELECT `_c0` AS `m` FROM (SELECT `test_max`(length(`src`.`value`))	 	 
 FROM `src`) `view9`, tableType:VIRTUAL_VIEW)		 
 PREHOOK: query: SELECT * FROM view9
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/2110694538/10000
+PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-59-07_150_2335956643529213853/10000
 POSTHOOK: query: SELECT * FROM view9
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/2110694538/10000
+POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-59-07_150_2335956643529213853/10000
 7
 PREHOOK: query: -- test usage of a subselect within a view
 CREATE VIEW view10 AS
 SELECT slurp.* FROM (SELECT * FROM src WHERE key=86) slurp
 PREHOOK: type: CREATEVIEW
-PREHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/991232364/10000
+PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-59-13_020_15297505204545248/10000
 POSTHOOK: query: -- test usage of a subselect within a view
 CREATE VIEW view10 AS
 SELECT slurp.* FROM (SELECT * FROM src WHERE key=86) slurp
 POSTHOOK: type: CREATEVIEW
-POSTHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/991232364/10000
+POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-59-13_020_15297505204545248/10000
 POSTHOOK: Output: default@view10
 PREHOOK: query: DESCRIBE EXTENDED view10
 PREHOOK: type: DESCTABLE
@@ -590,15 +598,15 @@
 key	string	
 value	string	
 	 	 
-Detailed Table Information	Table(tableName:view10, dbName:default, owner:jsichi, createTime:1264709931, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null)], location:null, inputFormat:null, outputFormat:null, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1264709931}, viewOriginalText:SELECT slurp.* FROM (SELECT * FROM src WHERE key=86) slurp, viewExpandedText:SELECT `slurp`.`key`, `slurp`.`value` FROM (SELECT `src`.`key`, `src`.`value` FROM `src` WHERE `src`.`key`=86) `slurp`, tableType:VIRTUAL_VIEW)	
+Detailed Table Information	Table(tableName:view10, dbName:default, owner:jsichi, createTime:1265309953, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null)], location:null, inputFormat:null, outputFormat:null, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1265309953}, viewOriginalText:SELECT slurp.* FROM (SELECT * FROM src WHERE key=86) slurp, viewExpandedText:SELECT `slurp`.`key`, `slurp`.`value` FROM (SELECT `src`.`key`, `src`.`value` FROM `src` WHERE `src`.`key`=86) `slurp`, tableType:VIRTUAL_VIEW)	
 PREHOOK: query: SELECT * FROM view10
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/1318443718/10000
+PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-59-13_203_2739584306564147849/10000
 POSTHOOK: query: SELECT * FROM view10
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/1318443718/10000
+POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-59-13_203_2739584306564147849/10000
 86	val_86
 PREHOOK: query: -- test usage of a UDTF within a view
 CREATE TEMPORARY FUNCTION test_explode AS
@@ -612,12 +620,12 @@
 SELECT test_explode(array(1,2,3)) AS (boom)
 FROM table1
 PREHOOK: type: CREATEVIEW
-PREHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/1311725444/10000
+PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-59-19_277_7422539496903079988/10000
 POSTHOOK: query: CREATE VIEW view11 AS
 SELECT test_explode(array(1,2,3)) AS (boom)
 FROM table1
 POSTHOOK: type: CREATEVIEW
-POSTHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/1311725444/10000
+POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-59-19_277_7422539496903079988/10000
 POSTHOOK: Output: default@view11
 PREHOOK: query: DESCRIBE EXTENDED view11
 PREHOOK: type: DESCTABLE
@@ -625,17 +633,17 @@
 POSTHOOK: type: DESCTABLE
 boom	int	
 	 	 
-Detailed Table Information	Table(tableName:view11, dbName:default, owner:jsichi, createTime:1264709934, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:boom, type:int, comment:null)], location:null, inputFormat:null, outputFormat:null, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1264709934}, viewOriginalText:SELECT test_explode(array(1,2,3)) AS (boom)	 
+Detailed Table Information	Table(tableName:view11, dbName:default, owner:jsichi, createTime:1265309959, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:boom, type:int, comment:null)], location:null, inputFormat:null, outputFormat:null, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1265309959}, viewOriginalText:SELECT test_explode(array(1,2,3)) AS (boom)	 
 FROM table1, viewExpandedText:SELECT `test_explode`(array(1,2,3)) AS (`boom`)	 	 
 FROM `table1`, tableType:VIRTUAL_VIEW)		 
 PREHOOK: query: SELECT * FROM view11
 PREHOOK: type: QUERY
 PREHOOK: Input: default@table1
-PREHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/1246951240/10000
+PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-59-19_569_7985933358186532472/10000
 POSTHOOK: query: SELECT * FROM view11
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@table1
-POSTHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/1246951240/10000
+POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-59-19_569_7985933358186532472/10000
 1
 2
 3
@@ -643,12 +651,12 @@
 CREATE VIEW view12 AS
 SELECT * FROM src LATERAL VIEW explode(array(1,2,3)) myTable AS myCol
 PREHOOK: type: CREATEVIEW
-PREHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/2101198350/10000
+PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-59-23_419_7948720262934047061/10000
 POSTHOOK: query: -- test usage of LATERAL within a view
 CREATE VIEW view12 AS
 SELECT * FROM src LATERAL VIEW explode(array(1,2,3)) myTable AS myCol
 POSTHOOK: type: CREATEVIEW
-POSTHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/2101198350/10000
+POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-59-23_419_7948720262934047061/10000
 POSTHOOK: Output: default@view12
 PREHOOK: query: DESCRIBE EXTENDED view12
 PREHOOK: type: DESCTABLE
@@ -658,43 +666,43 @@
 value	string	
 mycol	int	
 	 	 
-Detailed Table Information	Table(tableName:view12, dbName:default, owner:jsichi, createTime:1264709937, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null), FieldSchema(name:mycol, type:int, comment:null)], location:null, inputFormat:null, outputFormat:null, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1264709937}, viewOriginalText:SELECT * FROM src LATERAL VIEW explode(array(1,2,3)) myTable AS myCol, viewExpandedText:SELECT `src`.`key`, `src`.`value`, `mytable`.`mycol` FROM `src` LATERAL VIEW explode(array(1,2,3)) `myTable` AS `myCol`, tableType:VIRTUAL_VIEW)	
+Detailed Table Information	Table(tableName:view12, dbName:default, owner:jsichi, createTime:1265309963, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null), FieldSchema(name:mycol, type:int, comment:null)], location:null, inputFormat:null, outputFormat:null, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1265309963}, viewOriginalText:SELECT * FROM src LATERAL VIEW explode(array(1,2,3)) myTable AS myCol, viewExpandedText:SELECT `src`.`key`, `src`.`value`, `mytable`.`mycol` FROM `src` LATERAL VIEW explode(array(1,2,3)) `myTable` AS `myCol`, tableType:VIRTUAL_VIEW)	
 PREHOOK: query: SELECT * FROM view12
 ORDER BY key ASC, myCol ASC LIMIT 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/1985809958/10000
+PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-59-23_548_175005041541075187/10000
 POSTHOOK: query: SELECT * FROM view12
 ORDER BY key ASC, myCol ASC LIMIT 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/1985809958/10000
+POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-59-23_548_175005041541075187/10000
 0	val_0	1
 PREHOOK: query: -- test usage of LATERAL with a view as the LHS
 SELECT * FROM view2 LATERAL VIEW explode(array(1,2,3)) myTable AS myCol
 ORDER BY key ASC, myCol ASC LIMIT 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/1381956889/10000
+PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-59-28_949_9146465034013760763/10000
 POSTHOOK: query: -- test usage of LATERAL with a view as the LHS
 SELECT * FROM view2 LATERAL VIEW explode(array(1,2,3)) myTable AS myCol
 ORDER BY key ASC, myCol ASC LIMIT 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/1381956889/10000
+POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-59-28_949_9146465034013760763/10000
 0	val_0	1
 PREHOOK: query: -- test usage of TABLESAMPLE within a view
 CREATE VIEW view13 AS
 SELECT s.key
 FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 5 ON key) s
 PREHOOK: type: CREATEVIEW
-PREHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/782478113/10000
+PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-59-34_556_5167422066613745240/10000
 POSTHOOK: query: -- test usage of TABLESAMPLE within a view
 CREATE VIEW view13 AS
 SELECT s.key
 FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 5 ON key) s
 POSTHOOK: type: CREATEVIEW
-POSTHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/782478113/10000
+POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-59-34_556_5167422066613745240/10000
 POSTHOOK: Output: default@view13
 PREHOOK: query: DESCRIBE EXTENDED view13
 PREHOOK: type: DESCTABLE
@@ -702,19 +710,19 @@
 POSTHOOK: type: DESCTABLE
 key	int	
 	 	 
-Detailed Table Information	Table(tableName:view13, dbName:default, owner:jsichi, createTime:1264709944, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:int, comment:null)], location:null, inputFormat:null, outputFormat:null, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1264709944}, viewOriginalText:SELECT s.key	 
+Detailed Table Information	Table(tableName:view13, dbName:default, owner:jsichi, createTime:1265309974, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:int, comment:null)], location:null, inputFormat:null, outputFormat:null, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1265309974}, viewOriginalText:SELECT s.key	 
 FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 5 ON key) s, viewExpandedText:SELECT `s`.`key`	 	 
 FROM `srcbucket` TABLESAMPLE (BUCKET 1 OUT OF 5 ON `key`) `s`, tableType:VIRTUAL_VIEW)		 
 PREHOOK: query: SELECT * FROM view13
 ORDER BY key LIMIT 12
 PREHOOK: type: QUERY
 PREHOOK: Input: default@srcbucket
-PREHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/445413802/10000
+PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-59-34_926_5685404605922996493/10000
 POSTHOOK: query: SELECT * FROM view13
 ORDER BY key LIMIT 12
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@srcbucket
-POSTHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/445413802/10000
+POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-59-34_926_5685404605922996493/10000
 0
 0
 0
@@ -740,7 +748,7 @@
       select s4.key as key, s4.value as value from src s4 where s4.key < 10) unionsrc2
 ON (unionsrc1.key = unionsrc2.key)
 PREHOOK: type: CREATEVIEW
-PREHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/937833676/10000
+PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-59-40_014_1922756630317109324/10000
 POSTHOOK: query: -- test usage of JOIN+UNION+AGG all within same view
 CREATE VIEW view14 AS
 SELECT unionsrc1.key as k1, unionsrc1.value as v1,
@@ -754,7 +762,7 @@
       select s4.key as key, s4.value as value from src s4 where s4.key < 10) unionsrc2
 ON (unionsrc1.key = unionsrc2.key)
 POSTHOOK: type: CREATEVIEW
-POSTHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/937833676/10000
+POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-59-40_014_1922756630317109324/10000
 POSTHOOK: Output: default@view14
 PREHOOK: query: DESCRIBE EXTENDED view14
 PREHOOK: type: DESCTABLE
@@ -765,7 +773,7 @@
 k2	string	
 v2	string	
 	 	 
-Detailed Table Information	Table(tableName:view14, dbName:default, owner:jsichi, createTime:1264709947, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:k1, type:string, comment:null), FieldSchema(name:v1, type:string, comment:null), FieldSchema(name:k2, type:string, comment:null), FieldSchema(name:v2, type:string, comment:null)], location:null, inputFormat:null, outputFormat:null, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1264709947}, viewOriginalText:SELECT unionsrc1.key as k1, unionsrc1.value as v1,	 
+Detailed Table Information	Table(tableName:view14, dbName:default, owner:jsichi, createTime:1265309980, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:k1, type:string, comment:null), FieldSchema(name:v1, type:string, comment:null), FieldSchema(name:k2, type:string, comment:null), FieldSchema(name:v2, type:string, comment:null)], location:null, inputFormat:null, outputFormat:null, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1265309980}, viewOriginalText:SELECT unionsrc1.key as k1, unionsrc1.value as v1,	 
        unionsrc2.key as k2, unionsrc2.value as v2	 	 
 FROM (select 'tst1' as key, cast(count(1) as string) as value from src s1	 	 
                          UNION  ALL	 	 
@@ -788,12 +796,12 @@
 ORDER BY k1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/1988629463/10000
+PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-59-40_181_5012774559220192463/10000
 POSTHOOK: query: SELECT * FROM view14
 ORDER BY k1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-POSTHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/1988629463/10000
+POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_10-59-40_181_5012774559220192463/10000
 0	val_0	0	val_0
 0	val_0	0	val_0
 0	val_0	0	val_0
@@ -817,6 +825,99 @@
 8	val_8	8	val_8
 9	val_9	9	val_9
 tst1	500	tst1	500
+PREHOOK: query: -- test usage of GROUP BY within view
+CREATE VIEW view15 AS
+SELECT key,COUNT(value) AS value_count
+FROM src
+GROUP BY key
+PREHOOK: type: CREATEVIEW
+PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_11-00-22_865_3019206326718416825/10000
+POSTHOOK: query: -- test usage of GROUP BY within view
+CREATE VIEW view15 AS
+SELECT key,COUNT(value) AS value_count
+FROM src
+GROUP BY key
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_11-00-22_865_3019206326718416825/10000
+POSTHOOK: Output: default@view15
+PREHOOK: query: DESCRIBE EXTENDED view15
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE EXTENDED view15
+POSTHOOK: type: DESCTABLE
+key	string	
+value_count	bigint	
+	 	 
+Detailed Table Information	Table(tableName:view15, dbName:default, owner:jsichi, createTime:1265310022, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value_count, type:bigint, comment:null)], location:null, inputFormat:null, outputFormat:null, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1265310022}, viewOriginalText:SELECT key,COUNT(value) AS value_count	 
+FROM src	 	 
+GROUP BY key, viewExpandedText:SELECT `src`.`key`,COUNT(`src`.`value`) AS `value_count`	 	 
+FROM `src`	 	 
+GROUP BY `src`.`key`, tableType:VIRTUAL_VIEW)		 
+PREHOOK: query: SELECT * FROM view15
+ORDER BY value_count DESC, key
+LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_11-00-23_563_2588938435355559119/10000
+POSTHOOK: query: SELECT * FROM view15
+ORDER BY value_count DESC, key
+LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_11-00-23_563_2588938435355559119/10000
+230	5
+348	5
+401	5
+469	5
+138	4
+169	4
+277	4
+406	4
+468	4
+489	4
+PREHOOK: query: -- test usage of DISTINCT within view
+CREATE VIEW view16 AS
+SELECT DISTINCT value
+FROM src
+PREHOOK: type: CREATEVIEW
+PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_11-00-33_769_3018721160278846303/10000
+POSTHOOK: query: -- test usage of DISTINCT within view
+CREATE VIEW view16 AS
+SELECT DISTINCT value
+FROM src
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_11-00-33_769_3018721160278846303/10000
+POSTHOOK: Output: default@view16
+PREHOOK: query: DESCRIBE EXTENDED view16
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE EXTENDED view16
+POSTHOOK: type: DESCTABLE
+value	string	
+	 	 
+Detailed Table Information	Table(tableName:view16, dbName:default, owner:jsichi, createTime:1265310033, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:value, type:string, comment:null)], location:null, inputFormat:null, outputFormat:null, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:null, parameters:{}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1265310033}, viewOriginalText:SELECT DISTINCT value	 
+FROM src, viewExpandedText:SELECT DISTINCT `src`.`value`	 	 
+FROM `src`, tableType:VIRTUAL_VIEW)		 
+PREHOOK: query: SELECT * FROM view16
+ORDER BY value
+LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_11-00-33_910_9138213782100084136/10000
+POSTHOOK: query: SELECT * FROM view16
+ORDER BY value
+LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-02-04_11-00-33_910_9138213782100084136/10000
+val_0
+val_10
+val_100
+val_103
+val_104
+val_105
+val_11
+val_111
+val_113
+val_114
 PREHOOK: query: -- this should work since currently we don't track view->table
 -- dependencies for implementing RESTRICT
 DROP TABLE table1
@@ -896,6 +997,16 @@
 POSTHOOK: query: DROP VIEW view14
 POSTHOOK: type: DROPVIEW
 POSTHOOK: Output: default@view14
+PREHOOK: query: DROP VIEW view15
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: DROP VIEW view15
+POSTHOOK: type: DROPVIEW
+POSTHOOK: Output: default@view15
+PREHOOK: query: DROP VIEW view16
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: DROP VIEW view16
+POSTHOOK: type: DROPVIEW
+POSTHOOK: Output: default@view16
 PREHOOK: query: DROP TEMPORARY FUNCTION test_translate
 PREHOOK: type: DROPFUNCTION
 POSTHOOK: query: DROP TEMPORARY FUNCTION test_translate