Posted to commits@hive.apache.org by he...@apache.org on 2010/08/18 00:33:46 UTC

svn commit: r986512 [1/9] - in /hadoop/hive/trunk: ./ ql/src/java/org/apache/hadoop/hive/ql/parse/ ql/src/test/queries/clientnegative/ ql/src/test/queries/clientpositive/ ql/src/test/results/clientnegative/ ql/src/test/results/clientpositive/

Author: heyongqiang
Date: Tue Aug 17 22:33:43 2010
New Revision: 986512

URL: http://svn.apache.org/viewvc?rev=986512&view=rev
Log:
HIVE-1548 populate inputs and outputs for all statements (namit via He Yongqiang)
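
In short: each DDL analyze method now looks up the target table and registers it as a ReadEntity in the analyzer's inputs and, when the statement modifies the table, as a WriteEntity in its outputs, so pre-execution hooks see every object a statement touches. The patch inlines the same lookup-and-register block into each method; a hypothetical helper consolidating it (not part of this commit; addInputsOutputs is a made-up name) would look roughly like this inside DDLSemanticAnalyzer, using the same db, inputs, and outputs members the patch itself uses:

    // Hypothetical consolidation of the block this patch repeats in each
    // analyzeAlterTable*/analyzeDrop* method.
    private void addInputsOutputs(String tableName, boolean isWritten)
        throws SemanticException {
      try {
        // Third argument false: a missing table is ignored, so statements
        // like DROP TABLE on a nonexistent table still analyze cleanly.
        Table tab = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, false);
        if (tab != null) {
          inputs.add(new ReadEntity(tab));
          if (isWritten) {
            outputs.add(new WriteEntity(tab));
          }
        }
      } catch (HiveException e) {
        throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName));
      }
    }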

Modified:
    hadoop/hive/trunk/CHANGES.txt
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
    hadoop/hive/trunk/ql/src/test/queries/clientnegative/archive2.q
    hadoop/hive/trunk/ql/src/test/queries/clientnegative/column_rename1.q
    hadoop/hive/trunk/ql/src/test/queries/clientnegative/column_rename2.q
    hadoop/hive/trunk/ql/src/test/queries/clientnegative/column_rename4.q
    hadoop/hive/trunk/ql/src/test/queries/clientpositive/archive.q
    hadoop/hive/trunk/ql/src/test/queries/clientpositive/touch.q
    hadoop/hive/trunk/ql/src/test/results/clientnegative/addpart1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/alter_non_native.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/alter_view_failure.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/altern1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/archive1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/archive2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/column_rename1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/column_rename2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/column_rename4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/drop_view_failure1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/drop_view_failure2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/external2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/invalidate_view1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_part.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_part1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_part2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_part_no_drop.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl5.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl_no_drop.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/touch1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/touch2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/add_part_exist.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/alter1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/alter2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/alter3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/alter4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/alter_partition_format_loc.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/archive.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/bucket_groupby.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/columnarserde_create_shortcut.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/create_big_view.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/create_view.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ct_case_insensitive.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/diff_part_input_formats.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/drop_multi_partitions.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/fileformat_mix.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/index_compact_2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/index_compact_3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input24.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input25.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input28.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input_lazyserde.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/inputddl6.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/insertexternal1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/merge1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/nullgroup3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/partition_vs_table_metadata.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/partition_wise_fileformat.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/partition_wise_fileformat2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/partition_wise_fileformat3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/protectmode.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/rename_column.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/touch.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udaf_covar_pop.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udaf_covar_samp.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udaf_ngrams.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_length.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_reverse.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_sentences.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/virtual_column.q.out

Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=986512&r1=986511&r2=986512&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Tue Aug 17 22:33:43 2010
@@ -150,6 +150,9 @@ Trunk -  Unreleased
     HIVE-1547 Unarchiving operation throws NPE
     (Paul Yang via namit)
 
+    HIVE-1548 populate inputs and outputs for all statements
+    (namit via He Yongqiang)
+
   TESTS
 
     HIVE-1464. improve  test query performance

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=986512&r1=986511&r2=986512&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Tue Aug 17 22:33:43 2010
@@ -74,6 +74,8 @@ import org.apache.hadoop.hive.ql.plan.Al
 import org.apache.hadoop.hive.serde.Constants;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.mapred.TextInputFormat;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 
 /**
  * DDLSemanticAnalyzer.
@@ -110,10 +112,10 @@ public class DDLSemanticAnalyzer extends
   static class TablePartition {
     String tableName;
     HashMap<String, String> partSpec = null;
-    
+
     public TablePartition(){
     }
-    
+
     public TablePartition (ASTNode tblPart) throws SemanticException {
       tableName = unescapeIdentifier(tblPart.getChild(0).getText());
       if (tblPart.getChildCount() > 1) {
@@ -124,11 +126,12 @@ public class DDLSemanticAnalyzer extends
       }
     }
   }
-  
+
   public DDLSemanticAnalyzer(HiveConf conf) throws SemanticException {
     super(conf);
     // Partition can't have this name
     reservedPartitionValues.add(HiveConf.getVar(conf, ConfVars.DEFAULTPARTITIONNAME));
+
     // Partition value can't end in this suffix
     reservedPartitionValues.add(HiveConf.getVar(conf, ConfVars.METASTORE_INT_ORIGINAL));
     reservedPartitionValues.add(HiveConf.getVar(conf, ConfVars.METASTORE_INT_ARCHIVED));
@@ -137,7 +140,7 @@ public class DDLSemanticAnalyzer extends
 
   @Override
   public void analyzeInternal(ASTNode ast) throws SemanticException {
-    
+
     if(ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_PARTITION) {
       TablePartition tblPart = new TablePartition((ASTNode)ast.getChild(0));
       String tableName = tblPart.tableName;
@@ -149,7 +152,7 @@ public class DDLSemanticAnalyzer extends
         analyzeAlterTableProtectMode(ast, tableName, partSpec);
       } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_LOCATION) {
         analyzeAlterTableLocation(ast, tableName, partSpec);
-      } 
+      }
     } else if (ast.getToken().getType() == HiveParser.TOK_DROPTABLE) {
       analyzeDropTable(ast, false);
     } else if (ast.getToken().getType() == HiveParser.TOK_CREATEINDEX) {
@@ -217,6 +220,17 @@ public class DDLSemanticAnalyzer extends
   private void analyzeDropTable(ASTNode ast, boolean expectView)
       throws SemanticException {
     String tableName = unescapeIdentifier(ast.getChild(0).getText());
+    try {
+      Table tab = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, false);
+      // Ignore if table does not exist
+      if (tab != null) {
+        inputs.add(new ReadEntity(tab));
+        outputs.add(new WriteEntity(tab));
+      }
+    } catch (HiveException e) {
+      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName));
+    }
+
     DropTableDesc dropTblDesc = new DropTableDesc(tableName, expectView);
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         dropTblDesc), conf));
@@ -333,7 +347,7 @@ public class DDLSemanticAnalyzer extends
             indexTbl, db, indexTblPartitions);
       }
 
-      List<Task<?>> ret = handler.generateIndexBuildTaskList(baseTbl, 
+      List<Task<?>> ret = handler.generateIndexBuildTaskList(baseTbl,
           index, indexTblPartitions, baseTblPartitions, indexTbl, getInputs(), getOutputs());
       return ret;
     } catch (Exception e) {
@@ -389,6 +403,17 @@ public class DDLSemanticAnalyzer extends
       new AlterTableDesc(AlterTableTypes.ADDPROPS, expectView);
     alterTblDesc.setProps(mapProp);
     alterTblDesc.setOldName(tableName);
+
+    try {
+      Table tab = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, false);
+      if (tab != null) {
+        inputs.add(new ReadEntity(tab));
+        outputs.add(new WriteEntity(tab));
+      }
+    } catch (HiveException e) {
+      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName));
+    }
+
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         alterTblDesc), conf));
   }
@@ -402,6 +427,17 @@ public class DDLSemanticAnalyzer extends
         AlterTableTypes.ADDSERDEPROPS);
     alterTblDesc.setProps(mapProp);
     alterTblDesc.setOldName(tableName);
+
+    try {
+      Table tab = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, false);
+      if (tab != null) {
+        inputs.add(new ReadEntity(tab));
+        outputs.add(new WriteEntity(tab));
+      }
+    } catch (HiveException e) {
+      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName));
+    }
+
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         alterTblDesc), conf));
   }
@@ -417,6 +453,17 @@ public class DDLSemanticAnalyzer extends
     }
     alterTblDesc.setOldName(tableName);
     alterTblDesc.setSerdeName(serdeName);
+
+    try {
+      Table tab = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, false);
+      if (tab != null) {
+        inputs.add(new ReadEntity(tab));
+        outputs.add(new WriteEntity(tab));
+      }
+    } catch (HiveException e) {
+      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName));
+    }
+
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         alterTblDesc), conf));
   }
@@ -467,9 +514,20 @@ public class DDLSemanticAnalyzer extends
       serde = COLUMNAR_SERDE;
       break;
     }
-    
+
     AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, inputFormat,
         outputFormat, serde, storageHandler, partSpec);
+
+    try {
+      Table tab = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, false);
+      if (tab != null) {
+        inputs.add(new ReadEntity(tab));
+        outputs.add(new WriteEntity(tab));
+      }
+    } catch (HiveException e) {
+      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName));
+    }
+
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         alterTblDesc), conf));
   }
@@ -480,6 +538,17 @@ public class DDLSemanticAnalyzer extends
     String newLocation = unescapeSQLString(ast.getChild(0).getText());
 
     AlterTableDesc alterTblDesc = new AlterTableDesc (tableName, newLocation, partSpec);
+
+    try {
+      Table tab = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, false);
+      if (tab != null) {
+        inputs.add(new ReadEntity(tab));
+        outputs.add(new WriteEntity(tab));
+      }
+    } catch (HiveException e) {
+      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName));
+    }
+
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         alterTblDesc), conf));
   }
@@ -524,6 +593,16 @@ public class DDLSemanticAnalyzer extends
           "Only protect mode NO_DROP or OFFLINE supported");
     }
 
+    try {
+      Table tab = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, false);
+      if (tab != null) {
+        inputs.add(new ReadEntity(tab));
+        outputs.add(new WriteEntity(tab));
+      }
+    } catch (HiveException e) {
+      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName));
+    }
+
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         alterTblDesc), conf));
   }
@@ -531,6 +610,17 @@ public class DDLSemanticAnalyzer extends
   private void analyzeAlterTableClusterSort(ASTNode ast)
       throws SemanticException {
     String tableName = unescapeIdentifier(ast.getChild(0).getText());
+
+    try {
+      Table tab = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, false);
+      if (tab != null) {
+        inputs.add(new ReadEntity(tab));
+        outputs.add(new WriteEntity(tab));
+      }
+    } catch (HiveException e) {
+      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName));
+    }
+
     if (ast.getChildCount() == 1) {
       // This means that we want to turn off bucketing
       AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, -1,
@@ -747,8 +837,19 @@ public class DDLSemanticAnalyzer extends
   }
 
   private void analyzeAlterTableRename(ASTNode ast) throws SemanticException {
-    AlterTableDesc alterTblDesc = new AlterTableDesc(unescapeIdentifier(ast
-        .getChild(0).getText()), unescapeIdentifier(ast.getChild(1).getText()));
+    String tblName = unescapeIdentifier(ast.getChild(0).getText());
+    AlterTableDesc alterTblDesc = new AlterTableDesc(tblName,
+      unescapeIdentifier(ast.getChild(1).getText()));
+    try {
+      Table tab = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName, false);
+      if (tab != null) {
+        inputs.add(new ReadEntity(tab));
+        outputs.add(new WriteEntity(tab));
+      }
+    } catch (HiveException e) {
+      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tblName));
+    }
+
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         alterTblDesc), conf));
   }
@@ -783,6 +884,17 @@ public class DDLSemanticAnalyzer extends
     AlterTableDesc alterTblDesc = new AlterTableDesc(tblName,
         unescapeIdentifier(ast.getChild(1).getText()), unescapeIdentifier(ast
         .getChild(2).getText()), newType, newComment, first, flagCol);
+
+    try {
+      Table tab = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName, false);
+      if (tab != null) {
+        inputs.add(new ReadEntity(tab));
+        outputs.add(new WriteEntity(tab));
+      }
+    } catch (HiveException e) {
+      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tblName));
+    }
+
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         alterTblDesc), conf));
   }
@@ -793,6 +905,17 @@ public class DDLSemanticAnalyzer extends
     List<FieldSchema> newCols = getColumns((ASTNode) ast.getChild(1));
     AlterTableDesc alterTblDesc = new AlterTableDesc(tblName, newCols,
         alterType);
+
+    try {
+      Table tab = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName, false);
+      if (tab != null) {
+        inputs.add(new ReadEntity(tab));
+        outputs.add(new WriteEntity(tab));
+      }
+    } catch (HiveException e) {
+      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tblName));
+    }
+
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         alterTblDesc), conf));
   }
@@ -802,6 +925,20 @@ public class DDLSemanticAnalyzer extends
     // get table metadata
     List<Map<String, String>> partSpecs = getPartitionSpecs(ast);
     DropTableDesc dropTblDesc = new DropTableDesc(tblName, partSpecs);
+
+    try {
+      Table tab = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName, false);
+      if (tab != null) {
+        inputs.add(new ReadEntity(tab));
+      }
+    } catch (HiveException e) {
+      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tblName));
+    }
+
+    if (partSpecs != null) {
+      addTablePartsOutputs(tblName, partSpecs);
+    }
+
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         dropTblDesc), conf));
   }
@@ -819,8 +956,18 @@ public class DDLSemanticAnalyzer extends
       throws SemanticException {
 
     String tblName = unescapeIdentifier(ast.getChild(0).getText());
+    try {
+      Table tab = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName, false);
+      if (tab != null) {
+        inputs.add(new ReadEntity(tab));
+      }
+    } catch (HiveException e) {
+      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tblName));
+    }
+
     // partition name to value
     List<Map<String, String>> partSpecs = getPartitionSpecs(ast);
+    addTablePartsOutputs(tblName, partSpecs);
 
     Iterator<Map<String, String>> partIter = partSpecs.iterator();
 
@@ -882,6 +1029,17 @@ public class DDLSemanticAnalyzer extends
       throws SemanticException {
 
     String tblName = unescapeIdentifier(ast.getChild(0).getText());
+    Table tab;
+
+    try {
+      tab = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName, false);
+      if (tab != null) {
+        inputs.add(new ReadEntity(tab));
+      }
+    } catch (HiveException e) {
+      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tblName));
+    }
+
     // partition name to value
     List<Map<String, String>> partSpecs = getPartitionSpecs(ast);
 
@@ -889,15 +1047,17 @@ public class DDLSemanticAnalyzer extends
       AlterTableSimpleDesc touchDesc = new AlterTableSimpleDesc(
           MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName, null,
           AlterTableDesc.AlterTableTypes.TOUCH);
+      outputs.add(new WriteEntity(tab));
       rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
-          touchDesc), conf));
+                                                touchDesc), conf));
     } else {
+      addTablePartsOutputs(tblName, partSpecs);
       for (Map<String, String> partSpec : partSpecs) {
         AlterTableSimpleDesc touchDesc = new AlterTableSimpleDesc(
             MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName, partSpec,
             AlterTableDesc.AlterTableTypes.TOUCH);
         rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
-            touchDesc), conf));
+                                                  touchDesc), conf));
       }
     }
   }
@@ -912,6 +1072,17 @@ public class DDLSemanticAnalyzer extends
     String tblName = unescapeIdentifier(ast.getChild(0).getText());
     // partition name to value
     List<Map<String, String>> partSpecs = getPartitionSpecs(ast);
+
+    try {
+      Table tab = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName, false);
+      if (tab != null) {
+        inputs.add(new ReadEntity(tab));
+      }
+    } catch (HiveException e) {
+      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tblName));
+    }
+    addTablePartsOutputs(tblName, partSpecs);
+
     if (partSpecs.size() > 1 ) {
       throw new SemanticException(isUnArchive ?
           ErrorMsg.UNARCHIVE_ON_MULI_PARTS.getMsg() :
@@ -1005,4 +1176,30 @@ public class DDLSemanticAnalyzer extends
       }
     }
   }
+
+  /**
+   * Add the table partitions to be modified to the outputs, so that they are available to the
+   * pre-execution hook. If a partition does not exist, no error is thrown.
+   */
+  private void addTablePartsOutputs(String tblName, List<Map<String, String>> partSpecs)
+    throws SemanticException {
+    Table tab;
+    try {
+      tab = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName);
+    } catch (HiveException e) {
+      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tblName));
+    }
+
+    for (Map<String, String> partSpec : partSpecs) {
+      try {
+        Partition part = db.getPartition(tab, partSpec, false);
+        if (part == null) {
+          continue;
+        }
+        outputs.add(new WriteEntity(part));
+      } catch (HiveException e) {
+        // Ignore the error if the partition does not exist
+      }
+    }
+  }
 }
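
The entities collected above are what the pre-execution hooks receive; the new PREHOOK: Input/Output lines in the test result diffs below are printed by exactly such a hook at test time. A minimal sketch of a hook consuming them (LogEntitiesHook is a made-up name; the run() signature assumed here is the PreExecute hook interface of this era of Hive, registered via hive.exec.pre.hooks):

    import java.util.Set;

    import org.apache.hadoop.hive.ql.hooks.PreExecute;
    import org.apache.hadoop.hive.ql.hooks.ReadEntity;
    import org.apache.hadoop.hive.ql.hooks.WriteEntity;
    import org.apache.hadoop.hive.ql.session.SessionState;
    import org.apache.hadoop.security.UserGroupInformation;

    // Hypothetical hook that logs every entity a statement reads or writes,
    // mirroring the PREHOOK lines visible in the .q.out diffs below.
    public class LogEntitiesHook implements PreExecute {
      public void run(SessionState sess, Set<ReadEntity> inputs,
          Set<WriteEntity> outputs, UserGroupInformation ugi) throws Exception {
        for (ReadEntity in : inputs) {
          SessionState.getConsole().printInfo("PREHOOK: Input: " + in);
        }
        for (WriteEntity out : outputs) {
          SessionState.getConsole().printInfo("PREHOOK: Output: " + out);
        }
      }
    }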

Modified: hadoop/hive/trunk/ql/src/test/queries/clientnegative/archive2.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientnegative/archive2.q?rev=986512&r1=986511&r2=986512&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientnegative/archive2.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientnegative/archive2.q Tue Aug 17 22:33:43 2010
@@ -2,4 +2,9 @@ set hive.archive.enabled = true;
 -- Tests trying to unarchive a non-archived partition
 -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.17, 0.18, 0.19)
 
-ALTER TABLE srcpart UNARCHIVE PARTITION (ds='2008-04-08', hr='12');
+drop table tstsrcpart;
+create table tstsrcpart like srcpart;
+insert overwrite table tstsrcpart partition (ds='2008-04-08', hr='12')
+select key, value from srcpart where ds='2008-04-08' and hr='12';
+
+ALTER TABLE tstsrcpart UNARCHIVE PARTITION (ds='2008-04-08', hr='12');

Modified: hadoop/hive/trunk/ql/src/test/queries/clientnegative/column_rename1.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientnegative/column_rename1.q?rev=986512&r1=986511&r2=986512&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientnegative/column_rename1.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientnegative/column_rename1.q Tue Aug 17 22:33:43 2010
@@ -1 +1,6 @@
-alter table src change src_not_exist key_value string;
\ No newline at end of file
+drop table tstsrc;
+create table tstsrc like src;
+insert overwrite table tstsrc
+select key, value from src;
+
+alter table tstsrc change src_not_exist key_value string;

Modified: hadoop/hive/trunk/ql/src/test/queries/clientnegative/column_rename2.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientnegative/column_rename2.q?rev=986512&r1=986511&r2=986512&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientnegative/column_rename2.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientnegative/column_rename2.q Tue Aug 17 22:33:43 2010
@@ -1 +1,6 @@
-alter table src change key value string;
\ No newline at end of file
+drop table tstsrc;
+create table tstsrc like src;
+insert overwrite table tstsrc
+select key, value from src;
+
+alter table tstsrc change key value string;

Modified: hadoop/hive/trunk/ql/src/test/queries/clientnegative/column_rename4.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientnegative/column_rename4.q?rev=986512&r1=986511&r2=986512&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientnegative/column_rename4.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientnegative/column_rename4.q Tue Aug 17 22:33:43 2010
@@ -1 +1,6 @@
-alter table src change key key2 string after key_value;
\ No newline at end of file
+drop table tstsrc;
+create table tstsrc like src;
+insert overwrite table tstsrc
+select key, value from src;
+
+alter table tstsrc change key key2 string after key_value;

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/archive.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/archive.q?rev=986512&r1=986511&r2=986512&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/archive.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/archive.q Tue Aug 17 22:33:43 2010
@@ -1,46 +1,69 @@
 set hive.archive.enabled = true;
 set hive.enforce.bucketing = true;
 
+drop table tstsrc;
+drop table tstsrcpart;
+
+create table tstsrc like src;
+insert overwrite table tstsrc select key, value from src;
+
+create table tstsrcpart like srcpart;
+
+insert overwrite table tstsrcpart partition (ds='2008-04-08', hr='11')
+select key, value from srcpart where ds='2008-04-08' and hr='11';
+
+insert overwrite table tstsrcpart partition (ds='2008-04-08', hr='12')
+select key, value from srcpart where ds='2008-04-08' and hr='12';
+
+insert overwrite table tstsrcpart partition (ds='2008-04-09', hr='11')
+select key, value from srcpart where ds='2008-04-09' and hr='11';
+
+insert overwrite table tstsrcpart partition (ds='2008-04-09', hr='12')
+select key, value from srcpart where ds='2008-04-09' and hr='12';
+
 -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.17, 0.18, 0.19)
 
-SELECT SUM(hash(col)) FROM (SELECT transform(*) using 'tr "\t" "_"' AS col 
-FROM (SELECT * FROM srcpart WHERE ds='2008-04-08') subq1) subq2;
+SELECT SUM(hash(col)) FROM (SELECT transform(*) using 'tr "\t" "_"' AS col
+FROM (SELECT * FROM tstsrcpart WHERE ds='2008-04-08') subq1) subq2;
 
-ALTER TABLE srcpart ARCHIVE PARTITION (ds='2008-04-08', hr='12');
+ALTER TABLE tstsrcpart ARCHIVE PARTITION (ds='2008-04-08', hr='12');
 
-SELECT SUM(hash(col)) FROM (SELECT transform(*) using 'tr "\t" "_"' AS col 
-FROM (SELECT * FROM srcpart WHERE ds='2008-04-08') subq1) subq2;
+SELECT SUM(hash(col)) FROM (SELECT transform(*) using 'tr "\t" "_"' AS col
+FROM (SELECT * FROM tstsrcpart WHERE ds='2008-04-08') subq1) subq2;
 
-SELECT key, count(1) FROM srcpart WHERE ds='2008-04-08' AND hr='12' AND key='0' GROUP BY key;
+SELECT key, count(1) FROM tstsrcpart WHERE ds='2008-04-08' AND hr='12' AND key='0' GROUP BY key;
 
-SELECT * FROM srcpart a JOIN src b ON a.key=b.key 
+SELECT * FROM tstsrcpart a JOIN tstsrc b ON a.key=b.key
 WHERE a.ds='2008-04-08' AND a.hr='12' AND a.key='0';
 
-ALTER TABLE srcpart UNARCHIVE PARTITION (ds='2008-04-08', hr='12');
+ALTER TABLE tstsrcpart UNARCHIVE PARTITION (ds='2008-04-08', hr='12');
 
-SELECT SUM(hash(col)) FROM (SELECT transform(*) using 'tr "\t" "_"' AS col 
-FROM (SELECT * FROM srcpart WHERE ds='2008-04-08') subq1) subq2;
+SELECT SUM(hash(col)) FROM (SELECT transform(*) using 'tr "\t" "_"' AS col
+FROM (SELECT * FROM tstsrcpart WHERE ds='2008-04-08') subq1) subq2;
 
-CREATE TABLE harbucket(key INT) 
+CREATE TABLE harbucket(key INT)
 PARTITIONED by (ds STRING)
 CLUSTERED BY (key) INTO 10 BUCKETS;
 
-INSERT OVERWRITE TABLE harbucket PARTITION(ds='1') SELECT CAST(key AS INT) AS a FROM src WHERE key < 50;
+INSERT OVERWRITE TABLE harbucket PARTITION(ds='1') SELECT CAST(key AS INT) AS a FROM tstsrc WHERE key < 50;
 
 SELECT key FROM harbucket TABLESAMPLE(BUCKET 1 OUT OF 10) SORT BY key;
-ALTER TABLE srcpart ARCHIVE PARTITION (ds='2008-04-08', hr='12');
+ALTER TABLE tstsrcpart ARCHIVE PARTITION (ds='2008-04-08', hr='12');
 SELECT key FROM harbucket TABLESAMPLE(BUCKET 1 OUT OF 10) SORT BY key;
-ALTER TABLE srcpart UNARCHIVE PARTITION (ds='2008-04-08', hr='12');
+ALTER TABLE tstsrcpart UNARCHIVE PARTITION (ds='2008-04-08', hr='12');
 SELECT key FROM harbucket TABLESAMPLE(BUCKET 1 OUT OF 10) SORT BY key;
 
 
-CREATE TABLE old_name(key INT) 
+CREATE TABLE old_name(key INT)
 PARTITIONED by (ds STRING);
 
-INSERT OVERWRITE TABLE old_name PARTITION(ds='1') SELECT CAST(key AS INT) AS a FROM src WHERE key < 50;
+INSERT OVERWRITE TABLE old_name PARTITION(ds='1') SELECT CAST(key AS INT) AS a FROM tstsrc WHERE key < 50;
 ALTER TABLE old_name ARCHIVE PARTITION (ds='1');
-SELECT SUM(hash(col)) FROM (SELECT transform(*) using 'tr "\t" "_"' AS col 
+SELECT SUM(hash(col)) FROM (SELECT transform(*) using 'tr "\t" "_"' AS col
 FROM (SELECT * FROM old_name WHERE ds='1') subq1) subq2;
 ALTER TABLE old_name RENAME TO new_name;
-SELECT SUM(hash(col)) FROM (SELECT transform(*) using 'tr "\t" "_"' AS col 
+SELECT SUM(hash(col)) FROM (SELECT transform(*) using 'tr "\t" "_"' AS col
 FROM (SELECT * FROM new_name WHERE ds='1') subq1) subq2;
+
+drop table tstsrc;
+drop table tstsrcpart;

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/touch.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/touch.q?rev=986512&r1=986511&r2=986512&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/touch.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/touch.q Tue Aug 17 22:33:43 2010
@@ -1,3 +1,17 @@
-ALTER TABLE src TOUCH;
-ALTER TABLE srcpart TOUCH;
-ALTER TABLE srcpart TOUCH PARTITION (ds='2008-04-08', hr='12');
\ No newline at end of file
+drop table tstsrc;
+drop table tstsrcpart;
+
+create table tstsrc like src;
+insert overwrite table tstsrc select key, value from src;
+
+create table tstsrcpart like srcpart;
+insert overwrite table tstsrcpart partition (ds='2008-04-08', hr='12')
+select key, value from srcpart where ds='2008-04-08' and hr='12';
+
+
+ALTER TABLE tstsrc TOUCH;
+ALTER TABLE tstsrcpart TOUCH;
+ALTER TABLE tstsrcpart TOUCH PARTITION (ds='2008-04-08', hr='12');
+
+drop table tstsrc;
+drop table tstsrcpart;

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/addpart1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/addpart1.q.out?rev=986512&r1=986511&r2=986512&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/addpart1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/addpart1.q.out Tue Aug 17 22:33:43 2010
@@ -5,8 +5,10 @@ POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@addpart1
 PREHOOK: query: alter table addpart1 add partition (b='f', c='s')
 PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@addpart1
 POSTHOOK: query: alter table addpart1 add partition (b='f', c='s')
 POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@addpart1
 POSTHOOK: Output: default@addpart1@b=f/c=s
 PREHOOK: query: show partitions addpart1
 PREHOOK: type: SHOWPARTITIONS
@@ -15,5 +17,6 @@ POSTHOOK: type: SHOWPARTITIONS
 b=f/c=s
 PREHOOK: query: alter table addpart1 add partition (b='f', c='')
 PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@addpart1
 FAILED: Error in metadata: get partition: Value for key c is null or empty
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/alter_non_native.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/alter_non_native.q.out?rev=986512&r1=986511&r2=986512&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/alter_non_native.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/alter_non_native.q.out Tue Aug 17 22:33:43 2010
@@ -8,5 +8,7 @@ POSTHOOK: Output: default@non_native1
 PREHOOK: query: -- we do not support ALTER TABLE on non-native tables yet
 ALTER TABLE non_native1 RENAME TO new_non_native
 PREHOOK: type: ALTERTABLE_RENAME
+PREHOOK: Input: default@non_native1
+PREHOOK: Output: default@non_native1
 FAILED: Error in metadata: Cannot use ALTER TABLE on a non-native table
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/alter_view_failure.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/alter_view_failure.q.out?rev=986512&r1=986511&r2=986512&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/alter_view_failure.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/alter_view_failure.q.out Tue Aug 17 22:33:43 2010
@@ -4,12 +4,14 @@ POSTHOOK: query: DROP VIEW xxx3
 POSTHOOK: type: DROPVIEW
 PREHOOK: query: CREATE VIEW xxx3 AS SELECT * FROM src
 PREHOOK: type: CREATEVIEW
-PREHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-22_12-07-35_608_1201726124226717789/10000
+PREHOOK: Output: file:/tmp/njain/hive_2010-08-16_23-08-10_293_51772665808484032/-mr-10000
 POSTHOOK: query: CREATE VIEW xxx3 AS SELECT * FROM src
 POSTHOOK: type: CREATEVIEW
-POSTHOOK: Output: file:/data/users/jsichi/open/hive-trunk/build/ql/scratchdir/hive_2010-03-22_12-07-35_608_1201726124226717789/10000
+POSTHOOK: Output: file:/tmp/njain/hive_2010-08-16_23-08-10_293_51772665808484032/-mr-10000
 POSTHOOK: Output: default@xxx3
 PREHOOK: query: ALTER TABLE xxx3 REPLACE COLUMNS (xyz int)
 PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@xxx3
+PREHOOK: Output: default@xxx3
 FAILED: Error in metadata: Cannot use this form of ALTER TABLE on a view
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/altern1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/altern1.q.out?rev=986512&r1=986511&r2=986512&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/altern1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/altern1.q.out Tue Aug 17 22:33:43 2010
@@ -5,5 +5,7 @@ POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@altern1
 PREHOOK: query: alter table altern1 replace columns(a int, b int, ds string)
 PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@altern1
+PREHOOK: Output: default@altern1
 Invalid table columns : Partition column name ds conflicts with table columns.
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/archive1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/archive1.q.out?rev=986512&r1=986511&r2=986512&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/archive1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/archive1.q.out Tue Aug 17 22:33:43 2010
@@ -23,11 +23,17 @@ POSTHOOK: Lineage: srcpart_archived PART
 POSTHOOK: Lineage: srcpart_archived PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: ALTER TABLE srcpart_archived ARCHIVE PARTITION (ds='2008-04-08', hr='12')
 PREHOOK: type: ALTERTABLE_ARCHIVE
+PREHOOK: Input: default@srcpart_archived
+PREHOOK: Output: default@srcpart_archived@ds=2008-04-08/hr=12
 POSTHOOK: query: ALTER TABLE srcpart_archived ARCHIVE PARTITION (ds='2008-04-08', hr='12')
 POSTHOOK: type: ALTERTABLE_ARCHIVE
+POSTHOOK: Input: default@srcpart_archived
+POSTHOOK: Output: default@srcpart_archived@ds=2008-04-08/hr=12
 POSTHOOK: Lineage: srcpart_archived PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: srcpart_archived PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: ALTER TABLE srcpart_archived ARCHIVE PARTITION (ds='2008-04-08', hr='12')
 PREHOOK: type: ALTERTABLE_ARCHIVE
+PREHOOK: Input: default@srcpart_archived
+PREHOOK: Output: default@srcpart_archived@ds=2008-04-08/hr=12
 FAILED: Error in metadata: Specified partition is already archived
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/archive2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/archive2.q.out?rev=986512&r1=986511&r2=986512&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/archive2.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/archive2.q.out Tue Aug 17 22:33:43 2010
@@ -1,7 +1,33 @@
 PREHOOK: query: -- Tests trying to unarchive a non-archived partition
 -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.17, 0.18, 0.19)
 
-ALTER TABLE srcpart UNARCHIVE PARTITION (ds='2008-04-08', hr='12')
+drop table tstsrcpart
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: -- Tests trying to unarchive a non-archived partition
+-- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.17, 0.18, 0.19)
+
+drop table tstsrcpart
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table tstsrcpart like srcpart
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table tstsrcpart like srcpart
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tstsrcpart
+PREHOOK: query: insert overwrite table tstsrcpart partition (ds='2008-04-08', hr='12')
+select key, value from srcpart where ds='2008-04-08' and hr='12'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12
+POSTHOOK: query: insert overwrite table tstsrcpart partition (ds='2008-04-08', hr='12')
+select key, value from srcpart where ds='2008-04-08' and hr='12'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12
+POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: ALTER TABLE tstsrcpart UNARCHIVE PARTITION (ds='2008-04-08', hr='12')
 PREHOOK: type: ALTERTABLE_UNARCHIVE
+PREHOOK: Input: default@tstsrcpart
+PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12
 FAILED: Error in metadata: Specified partition is not archived
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/column_rename1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/column_rename1.q.out?rev=986512&r1=986511&r2=986512&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/column_rename1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/column_rename1.q.out Tue Aug 17 22:33:43 2010
@@ -1,4 +1,27 @@
-PREHOOK: query: alter table src change src_not_exist key_value string
+PREHOOK: query: drop table tstsrc
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table tstsrc
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table tstsrc like src
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table tstsrc like src
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tstsrc
+PREHOOK: query: insert overwrite table tstsrc
+select key, value from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@tstsrc
+POSTHOOK: query: insert overwrite table tstsrc
+select key, value from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@tstsrc
+POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: alter table tstsrc change src_not_exist key_value string
 PREHOOK: type: ALTERTABLE_RENAMECOL
+PREHOOK: Input: default@tstsrc
+PREHOOK: Output: default@tstsrc
 Column 'src_not_exist' does not exist
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/column_rename2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/column_rename2.q.out?rev=986512&r1=986511&r2=986512&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/column_rename2.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/column_rename2.q.out Tue Aug 17 22:33:43 2010
@@ -1,4 +1,27 @@
-PREHOOK: query: alter table src change key value string
+PREHOOK: query: drop table tstsrc
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table tstsrc
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table tstsrc like src
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table tstsrc like src
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tstsrc
+PREHOOK: query: insert overwrite table tstsrc
+select key, value from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@tstsrc
+POSTHOOK: query: insert overwrite table tstsrc
+select key, value from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@tstsrc
+POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: alter table tstsrc change key value string
 PREHOOK: type: ALTERTABLE_RENAMECOL
+PREHOOK: Input: default@tstsrc
+PREHOOK: Output: default@tstsrc
 Column 'value' exists
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/column_rename4.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/column_rename4.q.out?rev=986512&r1=986511&r2=986512&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/column_rename4.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/column_rename4.q.out Tue Aug 17 22:33:43 2010
@@ -1,4 +1,27 @@
-PREHOOK: query: alter table src change key key2 string after key_value
+PREHOOK: query: drop table tstsrc
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table tstsrc
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table tstsrc like src
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table tstsrc like src
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tstsrc
+PREHOOK: query: insert overwrite table tstsrc
+select key, value from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@tstsrc
+POSTHOOK: query: insert overwrite table tstsrc
+select key, value from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@tstsrc
+POSTHOOK: Lineage: tstsrc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tstsrc.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: alter table tstsrc change key key2 string after key_value
 PREHOOK: type: ALTERTABLE_RENAMECOL
+PREHOOK: Input: default@tstsrc
+PREHOOK: Output: default@tstsrc
 Column 'key_value' does not exist
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/drop_view_failure1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/drop_view_failure1.q.out?rev=986512&r1=986511&r2=986512&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/drop_view_failure1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/drop_view_failure1.q.out Tue Aug 17 22:33:43 2010
@@ -6,5 +6,7 @@ POSTHOOK: Output: default@xxx1
 PREHOOK: query: -- Can't use DROP VIEW on a base table
 DROP VIEW xxx1
 PREHOOK: type: DROPVIEW
+PREHOOK: Input: default@xxx1
+PREHOOK: Output: default@xxx1
 FAILED: Error in metadata: Cannot drop a base table with DROP VIEW
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/drop_view_failure2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/drop_view_failure2.q.out?rev=986512&r1=986511&r2=986512&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/drop_view_failure2.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/drop_view_failure2.q.out Tue Aug 17 22:33:43 2010
@@ -5,13 +5,15 @@ POSTHOOK: type: DROPVIEW
 PREHOOK: query: -- Can't use DROP TABLE on a view
 CREATE VIEW xxx6 AS SELECT key FROM src
 PREHOOK: type: CREATEVIEW
-PREHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/tmp/890782486/10000
+PREHOOK: Output: file:/tmp/njain/hive_2010-08-16_23-08-27_750_3628035822001608649/-mr-10000
 POSTHOOK: query: -- Can't use DROP TABLE on a view
 CREATE VIEW xxx6 AS SELECT key FROM src
 POSTHOOK: type: CREATEVIEW
-POSTHOOK: Output: file:/Users/jsichi/open/hive-trunk/build/ql/tmp/890782486/10000
+POSTHOOK: Output: file:/tmp/njain/hive_2010-08-16_23-08-27_750_3628035822001608649/-mr-10000
 POSTHOOK: Output: default@xxx6
 PREHOOK: query: DROP TABLE xxx6
 PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@xxx6
+PREHOOK: Output: default@xxx6
 FAILED: Error in metadata: Cannot drop a view with DROP TABLE
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/external2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/external2.q.out?rev=986512&r1=986511&r2=986512&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/external2.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/external2.q.out Tue Aug 17 22:33:43 2010
@@ -5,6 +5,7 @@ POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@external2
 PREHOOK: query: alter table external2 add partition (ds='2008-01-01') location 'invalidscheme://data.s3ndemo.hive/pkv/2008-01-01'
 PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@external2
 FAILED: Error in metadata: MetaException(message:Got exception: java.io.IOException No FileSystem for scheme: invalidscheme)
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
 PREHOOK: query: describe external2 partition (ds='2008-01-01')

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/invalidate_view1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/invalidate_view1.q.out?rev=986512&r1=986511&r2=986512&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/invalidate_view1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/invalidate_view1.q.out Tue Aug 17 22:33:43 2010
@@ -19,20 +19,22 @@ POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@xxx10
 PREHOOK: query: CREATE VIEW xxx9 AS SELECT * FROM xxx10
 PREHOOK: type: CREATEVIEW
-PREHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_13-43-31_670_5934534815948379913/10000
+PREHOOK: Output: file:/tmp/njain/hive_2010-08-16_23-08-55_315_6053999460183300682/-mr-10000
 POSTHOOK: query: CREATE VIEW xxx9 AS SELECT * FROM xxx10
 POSTHOOK: type: CREATEVIEW
-POSTHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_13-43-31_670_5934534815948379913/10000
+POSTHOOK: Output: file:/tmp/njain/hive_2010-08-16_23-08-55_315_6053999460183300682/-mr-10000
 POSTHOOK: Output: default@xxx9
 PREHOOK: query: CREATE VIEW xxx8 AS SELECT * FROM xxx9 xxx
 PREHOOK: type: CREATEVIEW
-PREHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_13-43-31_706_855643045189903710/10000
+PREHOOK: Output: file:/tmp/njain/hive_2010-08-16_23-08-55_367_8780672183290065174/-mr-10000
 POSTHOOK: query: CREATE VIEW xxx8 AS SELECT * FROM xxx9 xxx
 POSTHOOK: type: CREATEVIEW
-POSTHOOK: Output: file:/tmp/jssarma/hive_2010-07-21_13-43-31_706_855643045189903710/10000
+POSTHOOK: Output: file:/tmp/njain/hive_2010-08-16_23-08-55_367_8780672183290065174/-mr-10000
 POSTHOOK: Output: default@xxx8
 PREHOOK: query: ALTER TABLE xxx10 REPLACE COLUMNS (key int)
 PREHOOK: type: ALTERTABLE_REPLACECOLS
+PREHOOK: Input: default@xxx10
+PREHOOK: Output: default@xxx10
 POSTHOOK: query: ALTER TABLE xxx10 REPLACE COLUMNS (key int)
 POSTHOOK: type: ALTERTABLE_REPLACECOLS
 POSTHOOK: Input: default@xxx10

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_part.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_part.q.out?rev=986512&r1=986511&r2=986512&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_part.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_part.q.out Tue Aug 17 22:33:43 2010
@@ -13,42 +13,50 @@ POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@tbl_protectmode3
 PREHOOK: query: alter table tbl_protectmode3 add partition (p='p1')
 PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@tbl_protectmode3
 POSTHOOK: query: alter table tbl_protectmode3 add partition (p='p1')
 POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@tbl_protectmode3
 POSTHOOK: Output: default@tbl_protectmode3@p=p1
 PREHOOK: query: alter table tbl_protectmode3 add partition (p='p2')
 PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@tbl_protectmode3
 POSTHOOK: query: alter table tbl_protectmode3 add partition (p='p2')
 POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@tbl_protectmode3
 POSTHOOK: Output: default@tbl_protectmode3@p=p2
 PREHOOK: query: select * from tbl_protectmode3 where p='p1'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@tbl_protectmode3@p=p1
-PREHOOK: Output: file:/tmp/njain/hive_2010-08-10_12-52-03_048_1130501594201675939/-mr-10000
+PREHOOK: Output: file:/tmp/njain/hive_2010-08-16_23-09-01_532_2553850403435594247/-mr-10000
 POSTHOOK: query: select * from tbl_protectmode3 where p='p1'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@tbl_protectmode3@p=p1
-POSTHOOK: Output: file:/tmp/njain/hive_2010-08-10_12-52-03_048_1130501594201675939/-mr-10000
+POSTHOOK: Output: file:/tmp/njain/hive_2010-08-16_23-09-01_532_2553850403435594247/-mr-10000
 PREHOOK: query: select * from tbl_protectmode3 where p='p2'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@tbl_protectmode3@p=p2
-PREHOOK: Output: file:/tmp/njain/hive_2010-08-10_12-52-03_269_993918999524192390/-mr-10000
+PREHOOK: Output: file:/tmp/njain/hive_2010-08-16_23-09-01_723_1137621695919041270/-mr-10000
 POSTHOOK: query: select * from tbl_protectmode3 where p='p2'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@tbl_protectmode3@p=p2
-POSTHOOK: Output: file:/tmp/njain/hive_2010-08-10_12-52-03_269_993918999524192390/-mr-10000
+POSTHOOK: Output: file:/tmp/njain/hive_2010-08-16_23-09-01_723_1137621695919041270/-mr-10000
 PREHOOK: query: alter table tbl_protectmode3 partition (p='p1') enable offline
 PREHOOK: type: ALTERPARTITION_PROTECTMODE
+PREHOOK: Input: default@tbl_protectmode3
+PREHOOK: Output: default@tbl_protectmode3
 POSTHOOK: query: alter table tbl_protectmode3 partition (p='p1') enable offline
 POSTHOOK: type: ALTERPARTITION_PROTECTMODE
+POSTHOOK: Input: default@tbl_protectmode3
 POSTHOOK: Input: default@tbl_protectmode3@p=p1
+POSTHOOK: Output: default@tbl_protectmode3
 POSTHOOK: Output: default@tbl_protectmode3@p=p1
 PREHOOK: query: select * from tbl_protectmode3 where p='p2'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@tbl_protectmode3@p=p2
-PREHOOK: Output: file:/tmp/njain/hive_2010-08-10_12-52-03_610_5521102361534273246/-mr-10000
+PREHOOK: Output: file:/tmp/njain/hive_2010-08-16_23-09-02_059_4407769363171845870/-mr-10000
 POSTHOOK: query: select * from tbl_protectmode3 where p='p2'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@tbl_protectmode3@p=p2
-POSTHOOK: Output: file:/tmp/njain/hive_2010-08-10_12-52-03_610_5521102361534273246/-mr-10000
+POSTHOOK: Output: file:/tmp/njain/hive_2010-08-16_23-09-02_059_4407769363171845870/-mr-10000
 FAILED: Error in semantic analysis: Query against an offline table or partition Table tbl_protectmode3 Partition p=p1

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_part1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_part1.q.out?rev=986512&r1=986511&r2=986512&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_part1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_part1.q.out Tue Aug 17 22:33:43 2010
@@ -18,13 +18,17 @@ POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@tbl_protectmode5
 PREHOOK: query: alter table tbl_protectmode5 add partition (p='p1')
 PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@tbl_protectmode5
 POSTHOOK: query: alter table tbl_protectmode5 add partition (p='p1')
 POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@tbl_protectmode5
 POSTHOOK: Output: default@tbl_protectmode5@p=p1
 PREHOOK: query: alter table tbl_protectmode5 add partition (p='p2')
 PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@tbl_protectmode5
 POSTHOOK: query: alter table tbl_protectmode5 add partition (p='p2')
 POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@tbl_protectmode5
 POSTHOOK: Output: default@tbl_protectmode5@p=p2
 PREHOOK: query: insert overwrite table tbl_protectmode5_1
 select col from tbl_protectmode5 where p='p1'
@@ -51,9 +55,13 @@ POSTHOOK: Lineage: tbl_protectmode5_1.co
 POSTHOOK: Lineage: tbl_protectmode5_1.col SIMPLE [(tbl_protectmode5)tbl_protectmode5.FieldSchema(name:col, type:string, comment:null), ]
 PREHOOK: query: alter table tbl_protectmode5 partition (p='p1') enable offline
 PREHOOK: type: ALTERPARTITION_PROTECTMODE
+PREHOOK: Input: default@tbl_protectmode5
+PREHOOK: Output: default@tbl_protectmode5
 POSTHOOK: query: alter table tbl_protectmode5 partition (p='p1') enable offline
 POSTHOOK: type: ALTERPARTITION_PROTECTMODE
+POSTHOOK: Input: default@tbl_protectmode5
 POSTHOOK: Input: default@tbl_protectmode5@p=p1
+POSTHOOK: Output: default@tbl_protectmode5
 POSTHOOK: Output: default@tbl_protectmode5@p=p1
 POSTHOOK: Lineage: tbl_protectmode5_1.col SIMPLE [(tbl_protectmode5)tbl_protectmode5.FieldSchema(name:col, type:string, comment:null), ]
 POSTHOOK: Lineage: tbl_protectmode5_1.col SIMPLE [(tbl_protectmode5)tbl_protectmode5.FieldSchema(name:col, type:string, comment:null), ]

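ALTERTABLE_ADDPARTS follows a slightly different pattern in the hunks above: only the parent table is known at compile time, so it is registered as an Input, and the partition created by the statement shows up as a POSTHOOK-only Output. For ADD IF NOT EXISTS over partitions that already exist, the add_part_exist.q.out hunks at the end of this diff additionally show the pre-existing partitions as PREHOOK Outputs. A sketch of that registration, again with illustrative names rather than the patch's literal code:

    import java.util.Set;

    import org.apache.hadoop.hive.ql.hooks.ReadEntity;
    import org.apache.hadoop.hive.ql.hooks.WriteEntity;
    import org.apache.hadoop.hive.ql.metadata.Partition;
    import org.apache.hadoop.hive.ql.metadata.Table;

    // Sketch only: entity registration for ALTER TABLE ... ADD PARTITION
    // as the new golden files suggest.
    public final class AddPartitionEntitySketch {
      static void register(Table tab, Iterable<Partition> alreadyExisting,
          Set<ReadEntity> inputs, Set<WriteEntity> outputs) {
        inputs.add(new ReadEntity(tab));    // PREHOOK/POSTHOOK: Input lines
        for (Partition p : alreadyExisting) {
          outputs.add(new WriteEntity(p));  // PREHOOK Outputs in the
        }                                   // IF NOT EXISTS hunks
        // Partitions the statement actually creates are only known once
        // the DDL task has run, hence the POSTHOOK-only Output lines.
      }
    }
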
Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_part2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_part2.q.out?rev=986512&r1=986511&r2=986512&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_part2.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_part2.q.out Tue Aug 17 22:33:43 2010
@@ -13,8 +13,10 @@ POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@tbl_protectmode6
 PREHOOK: query: alter table tbl_protectmode6 add partition (p='p1')
 PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@tbl_protectmode6
 POSTHOOK: query: alter table tbl_protectmode6 add partition (p='p1')
 POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@tbl_protectmode6
 POSTHOOK: Output: default@tbl_protectmode6@p=p1
 PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' OVERWRITE INTO TABLE tbl_protectmode6 partition (p='p1')
 PREHOOK: type: LOAD
@@ -23,8 +25,12 @@ POSTHOOK: type: LOAD
 POSTHOOK: Output: default@tbl_protectmode6@p=p1
 PREHOOK: query: alter table tbl_protectmode6 partition (p='p1') enable offline
 PREHOOK: type: ALTERPARTITION_PROTECTMODE
+PREHOOK: Input: default@tbl_protectmode6
+PREHOOK: Output: default@tbl_protectmode6
 POSTHOOK: query: alter table tbl_protectmode6 partition (p='p1') enable offline
 POSTHOOK: type: ALTERPARTITION_PROTECTMODE
+POSTHOOK: Input: default@tbl_protectmode6
 POSTHOOK: Input: default@tbl_protectmode6@p=p1
+POSTHOOK: Output: default@tbl_protectmode6
 POSTHOOK: Output: default@tbl_protectmode6@p=p1
 FAILED: Error in semantic analysis: org.apache.hadoop.hive.ql.parse.SemanticException: Query against an offline table or partition tbl_protectmode6:p=p1

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_part_no_drop.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_part_no_drop.q.out?rev=986512&r1=986511&r2=986512&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_part_no_drop.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_part_no_drop.q.out Tue Aug 17 22:33:43 2010
@@ -13,14 +13,20 @@ POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@tbl_protectmode_no_drop
 PREHOOK: query: alter table tbl_protectmode_no_drop add partition (p='p1')
 PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@tbl_protectmode_no_drop
 POSTHOOK: query: alter table tbl_protectmode_no_drop add partition (p='p1')
 POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@tbl_protectmode_no_drop
 POSTHOOK: Output: default@tbl_protectmode_no_drop@p=p1
 PREHOOK: query: alter table tbl_protectmode_no_drop partition (p='p1') enable no_drop
 PREHOOK: type: ALTERPARTITION_PROTECTMODE
+PREHOOK: Input: default@tbl_protectmode_no_drop
+PREHOOK: Output: default@tbl_protectmode_no_drop
 POSTHOOK: query: alter table tbl_protectmode_no_drop partition (p='p1') enable no_drop
 POSTHOOK: type: ALTERPARTITION_PROTECTMODE
+POSTHOOK: Input: default@tbl_protectmode_no_drop
 POSTHOOK: Input: default@tbl_protectmode_no_drop@p=p1
+POSTHOOK: Output: default@tbl_protectmode_no_drop
 POSTHOOK: Output: default@tbl_protectmode_no_drop@p=p1
 PREHOOK: query: desc extended tbl_protectmode_no_drop partition (p='p1')
 PREHOOK: type: DESCTABLE
@@ -30,8 +36,10 @@ c1	string	
 c2	string	
 p	string	
 	 	 
-Detailed Partition Information	Partition(values:[p1], dbName:default, tableName:tbl_protectmode_no_drop, createTime:1281469940, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:c1, type:string, comment:null), FieldSchema(name:c2, type:string, comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/tbl_protectmode_no_drop/p=p1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{last_modified_by=njain, last_modified_time=1281469940, PROTECT_MODE=NO_DROP, transient_lastDdlTime=1281469940})	
+Detailed Partition Information	Partition(values:[p1], dbName:default, tableName:tbl_protectmode_no_drop, createTime:1282025356, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:c1, type:string, comment:null), FieldSchema(name:c2, type:string, comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/tbl_protectmode_no_drop/p=p1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{last_modified_by=njain, last_modified_time=1282025356, PROTECT_MODE=NO_DROP, transient_lastDdlTime=1282025356})	
 PREHOOK: query: drop table tbl_protectmode_no_drop
 PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@tbl_protectmode_no_drop
+PREHOOK: Output: default@tbl_protectmode_no_drop
 FAILED: Error in metadata: Table tbl_protectmode_no_drop Partition p=p1 is protected from being dropped
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

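Note in the hunk above that even a DROP TABLE that is about to fail now lists the protected table as both Input and Output: the entity sets are filled in during analysis and printed by a pre-execution hook before the DDLTask runs and rejects the drop. Roughly what such a printer looks like, assuming Hive's PreExecute hook interface; the test suite's real printer lives in the test tree and may differ in detail:

    import java.util.Set;

    import org.apache.hadoop.hive.ql.hooks.PreExecute;
    import org.apache.hadoop.hive.ql.hooks.ReadEntity;
    import org.apache.hadoop.hive.ql.hooks.WriteEntity;
    import org.apache.hadoop.hive.ql.session.SessionState;
    import org.apache.hadoop.security.UserGroupInformation;

    // Sketch only: a pre-execution hook that renders the entity sets the
    // way the PREHOOK: lines in these golden files read.
    public class EntityPrinterSketch implements PreExecute {
      public void run(SessionState sess, Set<ReadEntity> inputs,
          Set<WriteEntity> outputs, UserGroupInformation ugi) {
        for (ReadEntity in : inputs) {
          SessionState.getConsole().printError("PREHOOK: Input: " + in);
        }
        for (WriteEntity out : outputs) {
          SessionState.getConsole().printError("PREHOOK: Output: " + out);
        }
      }
    }
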
Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl1.q.out?rev=986512&r1=986511&r2=986512&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl1.q.out Tue Aug 17 22:33:43 2010
@@ -14,13 +14,15 @@ POSTHOOK: Output: default@tbl_protectmod
 PREHOOK: query: select * from tbl_protectmode_1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@tbl_protectmode_1
-PREHOOK: Output: file:/tmp/njain/hive_2010-08-10_12-52-21_331_4986102848710196881/-mr-10000
+PREHOOK: Output: file:/tmp/njain/hive_2010-08-16_23-09-17_086_5927096345092925728/-mr-10000
 POSTHOOK: query: select * from tbl_protectmode_1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@tbl_protectmode_1
-POSTHOOK: Output: file:/tmp/njain/hive_2010-08-10_12-52-21_331_4986102848710196881/-mr-10000
+POSTHOOK: Output: file:/tmp/njain/hive_2010-08-16_23-09-17_086_5927096345092925728/-mr-10000
 PREHOOK: query: alter table tbl_protectmode_1 enable offline
 PREHOOK: type: ALTERTABLE_PROTECTMODE
+PREHOOK: Input: default@tbl_protectmode_1
+PREHOOK: Output: default@tbl_protectmode_1
 POSTHOOK: query: alter table tbl_protectmode_1 enable offline
 POSTHOOK: type: ALTERTABLE_PROTECTMODE
 POSTHOOK: Input: default@tbl_protectmode_1

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl2.q.out?rev=986512&r1=986511&r2=986512&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl2.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl2.q.out Tue Aug 17 22:33:43 2010
@@ -13,23 +13,31 @@ POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@tbl_protectmode2
 PREHOOK: query: alter table tbl_protectmode2 add partition (p='p1')
 PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@tbl_protectmode2
 POSTHOOK: query: alter table tbl_protectmode2 add partition (p='p1')
 POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@tbl_protectmode2
 POSTHOOK: Output: default@tbl_protectmode2@p=p1
 PREHOOK: query: alter table tbl_protectmode2 enable no_drop
 PREHOOK: type: ALTERTABLE_PROTECTMODE
+PREHOOK: Input: default@tbl_protectmode2
+PREHOOK: Output: default@tbl_protectmode2
 POSTHOOK: query: alter table tbl_protectmode2 enable no_drop
 POSTHOOK: type: ALTERTABLE_PROTECTMODE
 POSTHOOK: Input: default@tbl_protectmode2
 POSTHOOK: Output: default@tbl_protectmode2
 PREHOOK: query: alter table tbl_protectmode2 enable offline
 PREHOOK: type: ALTERTABLE_PROTECTMODE
+PREHOOK: Input: default@tbl_protectmode2
+PREHOOK: Output: default@tbl_protectmode2
 POSTHOOK: query: alter table tbl_protectmode2 enable offline
 POSTHOOK: type: ALTERTABLE_PROTECTMODE
 POSTHOOK: Input: default@tbl_protectmode2
 POSTHOOK: Output: default@tbl_protectmode2
 PREHOOK: query: alter table tbl_protectmode2 disable no_drop
 PREHOOK: type: ALTERTABLE_PROTECTMODE
+PREHOOK: Input: default@tbl_protectmode2
+PREHOOK: Output: default@tbl_protectmode2
 POSTHOOK: query: alter table tbl_protectmode2 disable no_drop
 POSTHOOK: type: ALTERTABLE_PROTECTMODE
 POSTHOOK: Input: default@tbl_protectmode2
@@ -41,5 +49,5 @@ POSTHOOK: type: DESCTABLE
 col	string	
 p	string	
 	 	 
-Detailed Table Information	Table(tableName:tbl_protectmode2, dbName:default, owner:njain, createTime:1281469941, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:col, type:string, comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/tbl_protectmode2, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:p, type:string, comment:null)], parameters:{last_modified_by=njain, last_modified_time=1281469942, PROTECT_MODE=OFFLINE, transient_lastDdlTime=1281469942}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
+Detailed Table Information	Table(tableName:tbl_protectmode2, dbName:default, owner:njain, createTime:1282025357, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:col, type:string, comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/tbl_protectmode2, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:p, type:string, comment:null)], parameters:{last_modified_by=njain, last_modified_time=1282025358, PROTECT_MODE=OFFLINE, transient_lastDdlTime=1282025358}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
 FAILED: Error in semantic analysis: Query against an offline table or partition Table tbl_protectmode2

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl3.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl3.q.out?rev=986512&r1=986511&r2=986512&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl3.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl3.q.out Tue Aug 17 22:33:43 2010
@@ -14,13 +14,15 @@ POSTHOOK: Output: default@tbl_protectmod
 PREHOOK: query: select col from tbl_protectmode_4
 PREHOOK: type: QUERY
 PREHOOK: Input: default@tbl_protectmode_4
-PREHOOK: Output: file:/tmp/njain/hive_2010-08-10_12-52-22_797_804698083585859425/-mr-10000
+PREHOOK: Output: file:/tmp/njain/hive_2010-08-16_23-09-18_644_2317445421479072812/-mr-10000
 POSTHOOK: query: select col from tbl_protectmode_4
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@tbl_protectmode_4
-POSTHOOK: Output: file:/tmp/njain/hive_2010-08-10_12-52-22_797_804698083585859425/-mr-10000
+POSTHOOK: Output: file:/tmp/njain/hive_2010-08-16_23-09-18_644_2317445421479072812/-mr-10000
 PREHOOK: query: alter table tbl_protectmode_4 enable offline
 PREHOOK: type: ALTERTABLE_PROTECTMODE
+PREHOOK: Input: default@tbl_protectmode_4
+PREHOOK: Output: default@tbl_protectmode_4
 POSTHOOK: query: alter table tbl_protectmode_4 enable offline
 POSTHOOK: type: ALTERTABLE_PROTECTMODE
 POSTHOOK: Input: default@tbl_protectmode_4
@@ -31,5 +33,5 @@ POSTHOOK: query: desc extended tbl_prote
 POSTHOOK: type: DESCTABLE
 col	string	
 	 	 
-Detailed Table Information	Table(tableName:tbl_protectmode_4, dbName:default, owner:njain, createTime:1281469942, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:col, type:string, comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/tbl_protectmode_4, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{last_modified_by=njain, last_modified_time=1281469945, PROTECT_MODE=OFFLINE, transient_lastDdlTime=1281469945}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
+Detailed Table Information	Table(tableName:tbl_protectmode_4, dbName:default, owner:njain, createTime:1282025358, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:col, type:string, comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/tbl_protectmode_4, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{last_modified_by=njain, last_modified_time=1282025361, PROTECT_MODE=OFFLINE, transient_lastDdlTime=1282025361}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
 FAILED: Error in semantic analysis: Query against an offline table or partition Table tbl_protectmode_4

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl4.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl4.q.out?rev=986512&r1=986511&r2=986512&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl4.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl4.q.out Tue Aug 17 22:33:43 2010
@@ -22,23 +22,31 @@ POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@tbl_protectmode_tbl4
 PREHOOK: query: alter table tbl_protectmode_tbl4 add partition (p='p1')
 PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@tbl_protectmode_tbl4
 POSTHOOK: query: alter table tbl_protectmode_tbl4 add partition (p='p1')
 POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@tbl_protectmode_tbl4
 POSTHOOK: Output: default@tbl_protectmode_tbl4@p=p1
 PREHOOK: query: alter table tbl_protectmode_tbl4 enable no_drop
 PREHOOK: type: ALTERTABLE_PROTECTMODE
+PREHOOK: Input: default@tbl_protectmode_tbl4
+PREHOOK: Output: default@tbl_protectmode_tbl4
 POSTHOOK: query: alter table tbl_protectmode_tbl4 enable no_drop
 POSTHOOK: type: ALTERTABLE_PROTECTMODE
 POSTHOOK: Input: default@tbl_protectmode_tbl4
 POSTHOOK: Output: default@tbl_protectmode_tbl4
 PREHOOK: query: alter table tbl_protectmode_tbl4 enable offline
 PREHOOK: type: ALTERTABLE_PROTECTMODE
+PREHOOK: Input: default@tbl_protectmode_tbl4
+PREHOOK: Output: default@tbl_protectmode_tbl4
 POSTHOOK: query: alter table tbl_protectmode_tbl4 enable offline
 POSTHOOK: type: ALTERTABLE_PROTECTMODE
 POSTHOOK: Input: default@tbl_protectmode_tbl4
 POSTHOOK: Output: default@tbl_protectmode_tbl4
 PREHOOK: query: alter table tbl_protectmode_tbl4 disable no_drop
 PREHOOK: type: ALTERTABLE_PROTECTMODE
+PREHOOK: Input: default@tbl_protectmode_tbl4
+PREHOOK: Output: default@tbl_protectmode_tbl4
 POSTHOOK: query: alter table tbl_protectmode_tbl4 disable no_drop
 POSTHOOK: type: ALTERTABLE_PROTECTMODE
 POSTHOOK: Input: default@tbl_protectmode_tbl4
@@ -50,5 +58,5 @@ POSTHOOK: type: DESCTABLE
 col	string	
 p	string	
 	 	 
-Detailed Table Information	Table(tableName:tbl_protectmode_tbl4, dbName:default, owner:njain, createTime:1281469946, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:col, type:string, comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/tbl_protectmode_tbl4, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:p, type:string, comment:null)], parameters:{last_modified_by=njain, last_modified_time=1281469946, PROTECT_MODE=OFFLINE, transient_lastDdlTime=1281469946}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
+Detailed Table Information	Table(tableName:tbl_protectmode_tbl4, dbName:default, owner:njain, createTime:1282025362, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:col, type:string, comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/tbl_protectmode_tbl4, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:p, type:string, comment:null)], parameters:{last_modified_by=njain, last_modified_time=1282025362, PROTECT_MODE=OFFLINE, transient_lastDdlTime=1282025362}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
 FAILED: Error in semantic analysis: Query against an offline table or partition Table tbl_protectmode_tbl4

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl5.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl5.q.out?rev=986512&r1=986511&r2=986512&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl5.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl5.q.out Tue Aug 17 22:33:43 2010
@@ -22,23 +22,31 @@ POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@tbl_protectmode_tbl5
 PREHOOK: query: alter table tbl_protectmode_tbl5 add partition (p='p1')
 PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@tbl_protectmode_tbl5
 POSTHOOK: query: alter table tbl_protectmode_tbl5 add partition (p='p1')
 POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@tbl_protectmode_tbl5
 POSTHOOK: Output: default@tbl_protectmode_tbl5@p=p1
 PREHOOK: query: alter table tbl_protectmode_tbl5 enable no_drop
 PREHOOK: type: ALTERTABLE_PROTECTMODE
+PREHOOK: Input: default@tbl_protectmode_tbl5
+PREHOOK: Output: default@tbl_protectmode_tbl5
 POSTHOOK: query: alter table tbl_protectmode_tbl5 enable no_drop
 POSTHOOK: type: ALTERTABLE_PROTECTMODE
 POSTHOOK: Input: default@tbl_protectmode_tbl5
 POSTHOOK: Output: default@tbl_protectmode_tbl5
 PREHOOK: query: alter table tbl_protectmode_tbl5 enable offline
 PREHOOK: type: ALTERTABLE_PROTECTMODE
+PREHOOK: Input: default@tbl_protectmode_tbl5
+PREHOOK: Output: default@tbl_protectmode_tbl5
 POSTHOOK: query: alter table tbl_protectmode_tbl5 enable offline
 POSTHOOK: type: ALTERTABLE_PROTECTMODE
 POSTHOOK: Input: default@tbl_protectmode_tbl5
 POSTHOOK: Output: default@tbl_protectmode_tbl5
 PREHOOK: query: alter table tbl_protectmode_tbl5 disable no_drop
 PREHOOK: type: ALTERTABLE_PROTECTMODE
+PREHOOK: Input: default@tbl_protectmode_tbl5
+PREHOOK: Output: default@tbl_protectmode_tbl5
 POSTHOOK: query: alter table tbl_protectmode_tbl5 disable no_drop
 POSTHOOK: type: ALTERTABLE_PROTECTMODE
 POSTHOOK: Input: default@tbl_protectmode_tbl5
@@ -50,5 +58,5 @@ POSTHOOK: type: DESCTABLE
 col	string	
 p	string	
 	 	 
-Detailed Table Information	Table(tableName:tbl_protectmode_tbl5, dbName:default, owner:njain, createTime:1281469947, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:col, type:string, comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/tbl_protectmode_tbl5, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:p, type:string, comment:null)], parameters:{last_modified_by=njain, last_modified_time=1281469948, PROTECT_MODE=OFFLINE, transient_lastDdlTime=1281469948}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
+Detailed Table Information	Table(tableName:tbl_protectmode_tbl5, dbName:default, owner:njain, createTime:1282025363, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:col, type:string, comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/tbl_protectmode_tbl5, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:p, type:string, comment:null)], parameters:{last_modified_by=njain, last_modified_time=1282025363, PROTECT_MODE=OFFLINE, transient_lastDdlTime=1282025363}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
 FAILED: Error in semantic analysis: Query against an offline table or partition Table tbl_protectmode_tbl5

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl_no_drop.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl_no_drop.q.out?rev=986512&r1=986511&r2=986512&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl_no_drop.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_tbl_no_drop.q.out Tue Aug 17 22:33:43 2010
@@ -14,13 +14,15 @@ POSTHOOK: Output: default@tbl_protectmod
 PREHOOK: query: select * from tbl_protectmode__no_drop
 PREHOOK: type: QUERY
 PREHOOK: Input: default@tbl_protectmode__no_drop
-PREHOOK: Output: file:/tmp/njain/hive_2010-08-10_12-52-29_070_3321305319848064834/-mr-10000
+PREHOOK: Output: file:/tmp/njain/hive_2010-08-16_23-09-24_500_202960055467493836/-mr-10000
 POSTHOOK: query: select * from tbl_protectmode__no_drop
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@tbl_protectmode__no_drop
-POSTHOOK: Output: file:/tmp/njain/hive_2010-08-10_12-52-29_070_3321305319848064834/-mr-10000
+POSTHOOK: Output: file:/tmp/njain/hive_2010-08-16_23-09-24_500_202960055467493836/-mr-10000
 PREHOOK: query: alter table tbl_protectmode__no_drop enable no_drop
 PREHOOK: type: ALTERTABLE_PROTECTMODE
+PREHOOK: Input: default@tbl_protectmode__no_drop
+PREHOOK: Output: default@tbl_protectmode__no_drop
 POSTHOOK: query: alter table tbl_protectmode__no_drop enable no_drop
 POSTHOOK: type: ALTERTABLE_PROTECTMODE
 POSTHOOK: Input: default@tbl_protectmode__no_drop
@@ -31,8 +33,10 @@ POSTHOOK: query: desc extended tbl_prote
 POSTHOOK: type: DESCTABLE
 col	string	
 	 	 
-Detailed Table Information	Table(tableName:tbl_protectmode__no_drop, dbName:default, owner:njain, createTime:1281469949, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:col, type:string, comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/tbl_protectmode__no_drop, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{last_modified_by=njain, last_modified_time=1281469949, PROTECT_MODE=NO_DROP, transient_lastDdlTime=1281469949}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
+Detailed Table Information	Table(tableName:tbl_protectmode__no_drop, dbName:default, owner:njain, createTime:1282025364, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:col, type:string, comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/tbl_protectmode__no_drop, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{last_modified_by=njain, last_modified_time=1282025364, PROTECT_MODE=NO_DROP, transient_lastDdlTime=1282025364}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
 PREHOOK: query: drop table tbl_protectmode__no_drop
 PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@tbl_protectmode__no_drop
+PREHOOK: Output: default@tbl_protectmode__no_drop
 FAILED: Error in metadata: Table tbl_protectmode__no_drop is protected from being dropped
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/touch1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/touch1.q.out?rev=986512&r1=986511&r2=986512&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/touch1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/touch1.q.out Tue Aug 17 22:33:43 2010
@@ -1,4 +1,5 @@
 PREHOOK: query: ALTER TABLE srcpart TOUCH PARTITION (ds='2008-04-08', hr='13')
 PREHOOK: type: ALTERTABLE_TOUCH
+PREHOOK: Input: default@srcpart
 FAILED: Error in metadata: Specified partition does not exist
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/touch2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/touch2.q.out?rev=986512&r1=986511&r2=986512&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/touch2.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/touch2.q.out Tue Aug 17 22:33:43 2010
@@ -1,4 +1,5 @@
 PREHOOK: query: ALTER TABLE src TOUCH PARTITION (ds='2008-04-08', hr='12')
 PREHOOK: type: ALTERTABLE_TOUCH
+PREHOOK: Input: default@src
 FAILED: Error in metadata: table is not partitioned but partition spec exists: {ds=2008-04-08, hr=12}
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

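The two TOUCH failures above make the ordering explicit: entity registration happens during semantic analysis, so the new PREHOOK: Input line is printed even though the DDL task then fails, and no POSTHOOK block is ever reached. A rough, self-contained sketch of that sequencing; the hook and task types here are stand-ins, not Hive's actual Driver API:

    import java.util.Set;

    import org.apache.hadoop.hive.ql.hooks.ReadEntity;
    import org.apache.hadoop.hive.ql.hooks.WriteEntity;

    // Sketch only: the control flow the touch1/touch2 golden files reflect.
    public final class DdlHookOrderingSketch {
      interface Hook { void run(Set<ReadEntity> in, Set<WriteEntity> out); }
      interface DdlTask { int execute(); }

      static int run(Hook pre, Hook post, DdlTask task,
          Set<ReadEntity> inputs, Set<WriteEntity> outputs) {
        pre.run(inputs, outputs);   // prints the PREHOOK: lines
        int rc = task.execute();    // TOUCH fails here (return code 1)
        if (rc != 0) {
          return rc;                // post hooks skipped -> no POSTHOOK: lines
        }
        post.run(inputs, outputs);  // prints the POSTHOOK: lines
        return 0;
      }
    }
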
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/add_part_exist.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/add_part_exist.q.out?rev=986512&r1=986511&r2=986512&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/add_part_exist.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/add_part_exist.q.out Tue Aug 17 22:33:43 2010
@@ -9,8 +9,10 @@ POSTHOOK: query: SHOW PARTITIONS add_par
 POSTHOOK: type: SHOWPARTITIONS
 PREHOOK: query: ALTER TABLE add_part_test ADD PARTITION (ds='2010-01-01')
 PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@add_part_test
 POSTHOOK: query: ALTER TABLE add_part_test ADD PARTITION (ds='2010-01-01')
 POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@add_part_test
 POSTHOOK: Output: default@add_part_test@ds=2010-01-01
 PREHOOK: query: SHOW PARTITIONS add_part_test
 PREHOOK: type: SHOWPARTITIONS
@@ -19,8 +21,12 @@ POSTHOOK: type: SHOWPARTITIONS
 ds=2010-01-01
 PREHOOK: query: ALTER TABLE add_part_test ADD IF NOT EXISTS PARTITION (ds='2010-01-01')
 PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@add_part_test
+PREHOOK: Output: default@add_part_test@ds=2010-01-01
 POSTHOOK: query: ALTER TABLE add_part_test ADD IF NOT EXISTS PARTITION (ds='2010-01-01')
 POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@add_part_test
+POSTHOOK: Output: default@add_part_test@ds=2010-01-01
 PREHOOK: query: SHOW PARTITIONS add_part_test
 PREHOOK: type: SHOWPARTITIONS
 POSTHOOK: query: SHOW PARTITIONS add_part_test
@@ -28,8 +34,10 @@ POSTHOOK: type: SHOWPARTITIONS
 ds=2010-01-01
 PREHOOK: query: ALTER TABLE add_part_test ADD IF NOT EXISTS PARTITION (ds='2010-01-02')
 PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@add_part_test
 POSTHOOK: query: ALTER TABLE add_part_test ADD IF NOT EXISTS PARTITION (ds='2010-01-02')
 POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@add_part_test
 POSTHOOK: Output: default@add_part_test@ds=2010-01-02
 PREHOOK: query: SHOW PARTITIONS add_part_test
 PREHOOK: type: SHOWPARTITIONS
@@ -39,8 +47,14 @@ ds=2010-01-01
 ds=2010-01-02
 PREHOOK: query: ALTER TABLE add_part_test ADD IF NOT EXISTS PARTITION (ds='2010-01-01') PARTITION (ds='2010-01-02') PARTITION (ds='2010-01-03')
 PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@add_part_test
+PREHOOK: Output: default@add_part_test@ds=2010-01-01
+PREHOOK: Output: default@add_part_test@ds=2010-01-02
 POSTHOOK: query: ALTER TABLE add_part_test ADD IF NOT EXISTS PARTITION (ds='2010-01-01') PARTITION (ds='2010-01-02') PARTITION (ds='2010-01-03')
 POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@add_part_test
+POSTHOOK: Output: default@add_part_test@ds=2010-01-01
+POSTHOOK: Output: default@add_part_test@ds=2010-01-02
 POSTHOOK: Output: default@add_part_test@ds=2010-01-03
 PREHOOK: query: SHOW PARTITIONS add_part_test
 PREHOOK: type: SHOWPARTITIONS