Posted to commits@hive.apache.org by rm...@apache.org on 2009/09/29 03:25:30 UTC

svn commit: r819792 [3/24] - in /hadoop/hive/trunk: ./ common/src/java/org/apache/hadoop/hive/conf/ contrib/src/test/results/clientnegative/ contrib/src/test/results/clientpositive/ data/conf/ ql/src/java/org/apache/hadoop/hive/ql/ ql/src/java/org/apac...

Added: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/hooks/PostExecutePrinter.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/hooks/PostExecutePrinter.java?rev=819792&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/hooks/PostExecutePrinter.java (added)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/hooks/PostExecutePrinter.java Tue Sep 29 01:25:15 2009
@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.hooks;
+
+import java.util.Set;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
+
+/**
+ * Implementation of a post execute hook that simply prints out its
+ * parameters to standard output.
+ */
+public class PostExecutePrinter implements PostExecute {
+
+  @Override
+  public void run(SessionState sess, Set<ReadEntity> inputs,
+      Set<WriteEntity> outputs, UserGroupInformation ugi)
+    throws Exception {
+
+    LogHelper console = SessionState.getConsole();
+
+    if (console == null)
+      return;
+
+    if (sess != null) {
+      console.printError("POSTHOOK: query: " + sess.getCmd().trim());
+      console.printError("POSTHOOK: type: " + sess.getCommandType());
+    }
+
+    for(ReadEntity re: inputs) {
+      console.printError("POSTHOOK: Input: " + re.toString());
+    }
+    for(WriteEntity we: outputs) {
+      console.printError("POSTHOOK: Output: " + we.toString());
+    }
+  }
+
+}
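
A note on the hook above: although the Javadoc says "standard output", the
class writes through SessionState's LogHelper.printError, i.e. the console
error stream, which is what the test harness captures deterministically.
As a minimal sketch (not part of this commit), a client could enable both
printer hooks through HiveConf; the property names hive.exec.pre.hooks and
hive.exec.post.hooks (comma-separated hook class lists) are assumptions
here, presumably matching what the data/conf/ changes in this revision set
up; verify them against your Hive version.

  import org.apache.hadoop.hive.conf.HiveConf;

  public class EnablePrinterHooks {
    public static void main(String[] args) {
      HiveConf conf = new HiveConf();
      // Hypothetical setup; the property names are assumptions, not
      // taken from this commit's diff.
      conf.set("hive.exec.pre.hooks",
          "org.apache.hadoop.hive.ql.hooks.PreExecutePrinter");
      conf.set("hive.exec.post.hooks",
          "org.apache.hadoop.hive.ql.hooks.PostExecutePrinter");
      // A Driver built on this conf would emit the PREHOOK:/POSTHOOK:
      // lines that appear throughout the .q.out diffs below.
    }
  }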

Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/hooks/PreExecutePrinter.java Tue Sep 29 01:25:15 2009
@@ -31,22 +31,24 @@
 
   @Override
   public void run(SessionState sess, Set<ReadEntity> inputs,
-      Set<WriteEntity> outputs, UserGroupInformation ugi) 
+      Set<WriteEntity> outputs, UserGroupInformation ugi)
     throws Exception {
 
     LogHelper console = SessionState.getConsole();
 
     if (console == null)
       return;
-    
-    if (sess != null)
-      console.printError("query: " + sess.getCmd().trim());
+
+    if (sess != null) {
+      console.printError("PREHOOK: query: " + sess.getCmd().trim());
+      console.printError("PREHOOK: type: " + sess.getCommandType());
+    }
 
     for(ReadEntity re: inputs) {
-      console.printError("Input: " + re.toString());
+      console.printError("PREHOOK: Input: " + re.toString());
     }
     for(WriteEntity we: outputs) {
-      console.printError("Output: " + we.toString());
+      console.printError("PREHOOK: Output: " + we.toString());
     }
   }
 

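The run signature in the hunk above is the whole PreExecute contract: the
session (command text and command type), the read and write entity sets the
planner resolved, and the caller's UserGroupInformation. A hedged sketch of
a custom hook against that contract follows; the class name and AUDIT
message format are hypothetical, not part of r819792.

  package org.apache.hadoop.hive.ql.hooks;

  import java.util.Set;

  import org.apache.hadoop.hive.ql.session.SessionState;
  import org.apache.hadoop.security.UserGroupInformation;

  // Hypothetical example, not part of this commit.
  public class AuditPreHook implements PreExecute {

    @Override
    public void run(SessionState sess, Set<ReadEntity> inputs,
        Set<WriteEntity> outputs, UserGroupInformation ugi)
        throws Exception {
      if (sess == null) {
        return;
      }
      String user = (ugi == null) ? "unknown" : ugi.getUserName();
      // Log who is about to run what, plus each resolved input entity.
      System.err.println("AUDIT: " + user + " query: " + sess.getCmd().trim());
      for (ReadEntity re : inputs) {
        System.err.println("AUDIT: Input: " + re);
      }
    }
  }
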
Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/altern1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/altern1.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/altern1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/altern1.q.out Tue Sep 29 01:25:15 2009
@@ -1,5 +1,13 @@
-query: drop table altern1
-query: create table altern1(a int, b int) partitioned by (ds string)
-query: alter table altern1 replace columns(a int, b int, ds string)
+PREHOOK: query: drop table altern1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table altern1
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table altern1(a int, b int) partitioned by (ds string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table altern1(a int, b int) partitioned by (ds string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@altern1
+PREHOOK: query: alter table altern1 replace columns(a int, b int, ds string)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
 Invalid table columns : Partition column name ds conflicts with table columns.
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/bad_sample_clause.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/bad_sample_clause.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/bad_sample_clause.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/bad_sample_clause.q.out Tue Sep 29 01:25:15 2009
@@ -1,2 +1,6 @@
-query: CREATE TABLE dest1(key INT, value STRING, dt STRING, hr STRING) STORED AS TEXTFILE
+PREHOOK: query: CREATE TABLE dest1(key INT, value STRING, dt STRING, hr STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest1(key INT, value STRING, dt STRING, hr STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
 FAILED: Error in semantic analysis: Sampling Expression Needed for Non-Bucketed Table srcpart

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/clusterbydistributeby.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/clusterbydistributeby.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/clusterbydistributeby.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/clusterbydistributeby.q.out Tue Sep 29 01:25:15 2009
@@ -1,2 +1,6 @@
-query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
+PREHOOK: query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
 FAILED: Error in semantic analysis: line 8:14 Cannot have both Cluster By and Distribute By Clauses tkey

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/clusterbysortby.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/clusterbysortby.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/clusterbysortby.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/clusterbysortby.q.out Tue Sep 29 01:25:15 2009
@@ -1,2 +1,6 @@
-query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
+PREHOOK: query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
 FAILED: Error in semantic analysis: line 8:8 Cannot have both Cluster By and Sort By Clauses one

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/create_insert_outputformat.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/create_insert_outputformat.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/create_insert_outputformat.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/create_insert_outputformat.q.out Tue Sep 29 01:25:15 2009
@@ -1,2 +1,5 @@
-query: DROP TABLE table_test_output_fomat
+PREHOOK: query: DROP TABLE table_test_output_fomat
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE table_test_output_fomat
+POSTHOOK: type: DROPTABLE
 FAILED: Error in semantic analysis: Output Format must implement HiveOutputFormat, otherwise it should be either IgnoreKeyTextOutputFormat or SequenceFileOutputFormat

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/create_udaf_failure.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/create_udaf_failure.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/create_udaf_failure.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/create_udaf_failure.q.out Tue Sep 29 01:25:15 2009
@@ -1,2 +1,5 @@
-query: CREATE TEMPORARY FUNCTION test_udaf AS 'org.apache.hadoop.hive.ql.udf.UDAFWrongArgLengthForTestCase'
+PREHOOK: query: CREATE TEMPORARY FUNCTION test_udaf AS 'org.apache.hadoop.hive.ql.udf.UDAFWrongArgLengthForTestCase'
+PREHOOK: type: CREATEFUNCTION
+POSTHOOK: query: CREATE TEMPORARY FUNCTION test_udaf AS 'org.apache.hadoop.hive.ql.udf.UDAFWrongArgLengthForTestCase'
+POSTHOOK: type: CREATEFUNCTION
 FAILED: Error in semantic analysis: org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException: public boolean org.apache.hadoop.hive.ql.udf.UDAFWrongArgLengthForTestCase$UDAFWrongArgLengthForTestCaseEvaluator.merge() requires 0 arguments but 1 are passed in.

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/create_unknown_genericudf.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/create_unknown_genericudf.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/create_unknown_genericudf.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/create_unknown_genericudf.q.out Tue Sep 29 01:25:15 2009
@@ -1,2 +1,3 @@
-query: CREATE TEMPORARY FUNCTION dummy_genericudf AS 'org.apache.hadoop.hive.ql.udf.generic.DummyGenericUDF'
+PREHOOK: query: CREATE TEMPORARY FUNCTION dummy_genericudf AS 'org.apache.hadoop.hive.ql.udf.generic.DummyGenericUDF'
+PREHOOK: type: CREATEFUNCTION
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.FunctionTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/create_unknown_udf_udaf.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/create_unknown_udf_udaf.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/create_unknown_udf_udaf.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/create_unknown_udf_udaf.q.out Tue Sep 29 01:25:15 2009
@@ -1,2 +1,3 @@
-query: CREATE TEMPORARY FUNCTION dummy_function AS 'org.apache.hadoop.hive.ql.udf.DummyFunction'
+PREHOOK: query: CREATE TEMPORARY FUNCTION dummy_function AS 'org.apache.hadoop.hive.ql.udf.DummyFunction'
+PREHOOK: type: CREATEFUNCTION
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.FunctionTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/deletejar.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/deletejar.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/deletejar.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/deletejar.q.out Tue Sep 29 01:25:15 2009
@@ -1,4 +1,8 @@
-query: DROP TABLE DELETEJAR
-query: CREATE TABLE DELETEJAR(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' STORED AS TEXTFILE
+PREHOOK: query: DROP TABLE DELETEJAR
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE DELETEJAR
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE DELETEJAR(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
 FAILED: Error in metadata: Cannot validate serde: org.apache.hadoop.hive.serde2.TestSerDe
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/describe_xpath1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/describe_xpath1.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/describe_xpath1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/describe_xpath1.q.out Tue Sep 29 01:25:15 2009
@@ -1,3 +1,4 @@
-query: describe src_thrift.$elem$
+PREHOOK: query: describe src_thrift.$elem$
+PREHOOK: type: DESCTABLE
 FAILED: Error in metadata: java.lang.RuntimeException: cannot find field $elem$ from [public int org.apache.hadoop.hive.serde2.thrift.test.Complex.aint, public java.lang.String org.apache.hadoop.hive.serde2.thrift.test.Complex.aString, public java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lint, public java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lString, public java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lintString, public java.util.Map org.apache.hadoop.hive.serde2.thrift.test.Complex.mStringString]
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/describe_xpath2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/describe_xpath2.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/describe_xpath2.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/describe_xpath2.q.out Tue Sep 29 01:25:15 2009
@@ -1,3 +1,4 @@
-query: describe src_thrift.$key$
+PREHOOK: query: describe src_thrift.$key$
+PREHOOK: type: DESCTABLE
 FAILED: Error in metadata: java.lang.RuntimeException: cannot find field $key$ from [public int org.apache.hadoop.hive.serde2.thrift.test.Complex.aint, public java.lang.String org.apache.hadoop.hive.serde2.thrift.test.Complex.aString, public java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lint, public java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lString, public java.util.List org.apache.hadoop.hive.serde2.thrift.test.Complex.lintString, public java.util.Map org.apache.hadoop.hive.serde2.thrift.test.Complex.mStringString]
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/describe_xpath3.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/describe_xpath3.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/describe_xpath3.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/describe_xpath3.q.out Tue Sep 29 01:25:15 2009
@@ -1,3 +1,4 @@
-query: describe src_thrift.lint.abc
+PREHOOK: query: describe src_thrift.lint.abc
+PREHOOK: type: DESCTABLE
 FAILED: Error in metadata: org.apache.hadoop.hive.ql.metadata.HiveException: Error in getting fields from serde.Unknown type for abc
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/describe_xpath4.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/describe_xpath4.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/describe_xpath4.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/describe_xpath4.q.out Tue Sep 29 01:25:15 2009
@@ -1,3 +1,4 @@
-query: describe src_thrift.mStringString.abc
+PREHOOK: query: describe src_thrift.mStringString.abc
+PREHOOK: type: DESCTABLE
 FAILED: Error in metadata: org.apache.hadoop.hive.ql.metadata.HiveException: Error in getting fields from serde.Unknown type for abc
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/drop_native_udf.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/drop_native_udf.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/drop_native_udf.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/drop_native_udf.q.out Tue Sep 29 01:25:15 2009
@@ -1,2 +1,3 @@
-query: DROP TEMPORARY FUNCTION max
+PREHOOK: query: DROP TEMPORARY FUNCTION max
+PREHOOK: type: DROPFUNCTION
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.FunctionTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/external1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/external1.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/external1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/external1.q.out Tue Sep 29 01:25:15 2009
@@ -1,6 +1,11 @@
-query: drop table external1
-query: create external table external1(a int, b int) location 'invalidscheme://data.s3ndemo.hive/kv'
+PREHOOK: query: drop table external1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table external1
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create external table external1(a int, b int) location 'invalidscheme://data.s3ndemo.hive/kv'
+PREHOOK: type: CREATETABLE
 FAILED: Error in metadata: MetaException(message:Got exception: java.io.IOException No FileSystem for scheme: invalidscheme)
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
-query: describe external1
+PREHOOK: query: describe external1
+PREHOOK: type: DESCTABLE
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/external2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/external2.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/external2.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/external2.q.out Tue Sep 29 01:25:15 2009
@@ -1,7 +1,16 @@
-query: drop table external2
-query: create external table external2(a int, b int) partitioned by (ds string)
-query: alter table external2 add partition (ds='2008-01-01') location 'invalidscheme://data.s3ndemo.hive/pkv/2008-01-01'
+PREHOOK: query: drop table external2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table external2
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create external table external2(a int, b int) partitioned by (ds string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create external table external2(a int, b int) partitioned by (ds string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@external2
+PREHOOK: query: alter table external2 add partition (ds='2008-01-01') location 'invalidscheme://data.s3ndemo.hive/pkv/2008-01-01'
+PREHOOK: type: ALTERTABLE_ADDPARTS
 FAILED: Error in metadata: MetaException(message:Got exception: java.io.IOException No FileSystem for scheme: invalidscheme)
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
-query: describe external2 partition (ds='2008-01-01')
+PREHOOK: query: describe external2 partition (ds='2008-01-01')
+PREHOOK: type: DESCTABLE
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/fetchtask_ioexception.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/fetchtask_ioexception.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/fetchtask_ioexception.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/fetchtask_ioexception.q.out Tue Sep 29 01:25:15 2009
@@ -1,8 +1,23 @@
-query: CREATE TABLE fetchtask_ioexception (
+PREHOOK: query: CREATE TABLE fetchtask_ioexception (
   KEY STRING,
   VALUE STRING) STORED AS SEQUENCEFILE
-query: LOAD DATA LOCAL INPATH '../data/files/kv1_broken.seq' OVERWRITE INTO TABLE fetchtask_ioexception
-query: SELECT * FROM fetchtask_ioexception
-Input: default/fetchtask_ioexception
-Output: file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/tmp/1939498833/10000
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE fetchtask_ioexception (
+  KEY STRING,
+  VALUE STRING) STORED AS SEQUENCEFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@fetchtask_ioexception
+PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1_broken.seq' OVERWRITE INTO TABLE fetchtask_ioexception
+PREHOOK: type: LOAD
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1_broken.seq' OVERWRITE INTO TABLE fetchtask_ioexception
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@fetchtask_ioexception
+PREHOOK: query: SELECT * FROM fetchtask_ioexception
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fetchtask_ioexception
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/.ptest_3/build/ql/tmp/1739034966/10000
+POSTHOOK: query: SELECT * FROM fetchtask_ioexception
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fetchtask_ioexception
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/.ptest_3/build/ql/tmp/1739034966/10000
 Failed with exception java.io.IOException:java.io.EOFException

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/fileformat_void_input.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/fileformat_void_input.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/fileformat_void_input.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/fileformat_void_input.q.out Tue Sep 29 01:25:15 2009
@@ -1,8 +1,20 @@
-query: CREATE TABLE dest1(key INT, value STRING) STORED AS
+PREHOOK: query: CREATE TABLE dest1(key INT, value STRING) STORED AS
   INPUTFORMAT 'java.lang.Void'
   OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat'
-query: FROM src
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest1(key INT, value STRING) STORED AS
+  INPUTFORMAT 'java.lang.Void'
+  OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat'
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
+PREHOOK: query: FROM src
+INSERT OVERWRITE TABLE dest1 SELECT src.key, src.value WHERE src.key < 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest1
+POSTHOOK: query: FROM src
 INSERT OVERWRITE TABLE dest1 SELECT src.key, src.value WHERE src.key < 10
-Input: default/src
-Output: default/dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest1
 FAILED: Error in semantic analysis: line 3:20 Input Format must implement InputFormat dest1

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/invalid_create_tbl1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/invalid_create_tbl1.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/invalid_create_tbl1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/invalid_create_tbl1.q.out Tue Sep 29 01:25:15 2009
@@ -1,10 +1,14 @@
-query: DROP TABLE inv_valid_tbl1
-query: CREATE TABLE inv_valid_tbl1 COMMENT 'This is a thrift based table'
+PREHOOK: query: DROP TABLE inv_valid_tbl1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE inv_valid_tbl1
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE inv_valid_tbl1 COMMENT 'This is a thrift based table'
     PARTITIONED BY(aint DATETIME, country STRING)
     CLUSTERED BY(aint) SORTED BY(lint) INTO 32 BUCKETS
     ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.thrift.ThriftDeserializer'
     WITH SERDEPROPERTIES ('serialization.class' = 'org.apache.hadoop.hive.serde2.thrift.test.Complex',
                           'serialization.format' = 'org.apache.thrift.protocol.TBinaryProtocol')
     STORED AS SEQUENCEFILE
+PREHOOK: type: CREATETABLE
 FAILED: Error in metadata: org.apache.hadoop.hive.ql.metadata.HiveException: Partition column name aint conflicts with table columns.
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/load_wrong_fileformat.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/load_wrong_fileformat.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/load_wrong_fileformat.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/load_wrong_fileformat.q.out Tue Sep 29 01:25:15 2009
@@ -1,8 +1,19 @@
-query: -- test for loading into tables with the correct file format
+PREHOOK: query: -- test for loading into tables with the correct file format
 -- test for loading into partitions with the correct file format
 
 DROP TABLE T1
-query: CREATE TABLE T1(name STRING) STORED AS SEQUENCEFILE
-query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE T1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: -- test for loading into tables with the correct file format
+-- test for loading into partitions with the correct file format
+
+DROP TABLE T1
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE T1(name STRING) STORED AS SEQUENCEFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE T1(name STRING) STORED AS SEQUENCEFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE T1
+PREHOOK: type: LOAD
 Failed with exception Wrong file format. Please check the file's format.
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.MoveTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/load_wrong_fileformat_rc_seq.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/load_wrong_fileformat_rc_seq.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/load_wrong_fileformat_rc_seq.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/load_wrong_fileformat_rc_seq.q.out Tue Sep 29 01:25:15 2009
@@ -1,8 +1,20 @@
-query: -- test for loading into tables with the correct file format
+PREHOOK: query: -- test for loading into tables with the correct file format
 -- test for loading into partitions with the correct file format
 
 DROP TABLE T1
-query: CREATE TABLE T1(name STRING) STORED AS RCFILE
-query: LOAD DATA LOCAL INPATH '../data/files/kv1.seq' INTO TABLE T1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: -- test for loading into tables with the correct file format
+-- test for loading into partitions with the correct file format
+
+DROP TABLE T1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@t1
+PREHOOK: query: CREATE TABLE T1(name STRING) STORED AS RCFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE T1(name STRING) STORED AS RCFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.seq' INTO TABLE T1
+PREHOOK: type: LOAD
 Failed with exception Wrong file format. Please check the file's format.
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.MoveTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/load_wrong_fileformat_txt_seq.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/load_wrong_fileformat_txt_seq.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/load_wrong_fileformat_txt_seq.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/load_wrong_fileformat_txt_seq.q.out Tue Sep 29 01:25:15 2009
@@ -1,8 +1,20 @@
-query: -- test for loading into tables with the correct file format
+PREHOOK: query: -- test for loading into tables with the correct file format
 -- test for loading into partitions with the correct file format
 
 DROP TABLE T1
-query: CREATE TABLE T1(name STRING) STORED AS TEXTFILE
-query: LOAD DATA LOCAL INPATH '../data/files/kv1.seq' INTO TABLE T1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: -- test for loading into tables with the correct file format
+-- test for loading into partitions with the correct file format
+
+DROP TABLE T1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@t1
+PREHOOK: query: CREATE TABLE T1(name STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE T1(name STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.seq' INTO TABLE T1
+PREHOOK: type: LOAD
 Failed with exception Wrong file format. Please check the file's format.
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.MoveTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/nopart_insert.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/nopart_insert.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/nopart_insert.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/nopart_insert.q.out Tue Sep 29 01:25:15 2009
@@ -1,3 +1,10 @@
-query: DROP TABLE nopart_insert
-query: CREATE TABLE nopart_insert(a STRING, b STRING) PARTITIONED BY (ds STRING)
+PREHOOK: query: DROP TABLE nopart_insert
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE nopart_insert
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE nopart_insert(a STRING, b STRING) PARTITIONED BY (ds STRING)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE nopart_insert(a STRING, b STRING) PARTITIONED BY (ds STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@nopart_insert
 FAILED: Error in semantic analysis: need to specify partition columns because the destination table is partitioned.

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/nopart_load.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/nopart_load.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/nopart_load.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/nopart_load.q.out Tue Sep 29 01:25:15 2009
@@ -1,3 +1,10 @@
-query: DROP TABLE nopart_load
-query: CREATE TABLE nopart_load(a STRING, b STRING) PARTITIONED BY (ds STRING)
+PREHOOK: query: DROP TABLE nopart_load
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE nopart_load
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE nopart_load(a STRING, b STRING) PARTITIONED BY (ds STRING)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE nopart_load(a STRING, b STRING) PARTITIONED BY (ds STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@nopart_load
 FAILED: Error in semantic analysis: need to specify partition columns because the destination table is partitioned.

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/notable_alias3.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/notable_alias3.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/notable_alias3.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/notable_alias3.q.out Tue Sep 29 01:25:15 2009
@@ -1,2 +1,6 @@
-query: CREATE TABLE dest1(key INT, value DOUBLE) STORED AS TEXTFILE
+PREHOOK: query: CREATE TABLE dest1(key INT, value DOUBLE) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest1(key INT, value DOUBLE) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
 FAILED: Error in semantic analysis: line 4:44 Expression Not In Group By Key src

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/orderbysortby.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/orderbysortby.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/orderbysortby.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/orderbysortby.q.out Tue Sep 29 01:25:15 2009
@@ -1,2 +1,6 @@
-query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
+PREHOOK: query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
 FAILED: Error in semantic analysis: line 8:8 Cannot have both Order By and Sort By Clauses one

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/script_error.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/script_error.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/script_error.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/script_error.q.out Tue Sep 29 01:25:15 2009
@@ -1,6 +1,11 @@
-query: EXPLAIN
+PREHOOK: query: EXPLAIN
 SELECT TRANSFORM(src.key, src.value) USING '../data/scripts/error_script' AS (tkey, tvalue)
 FROM src
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT TRANSFORM(src.key, src.value) USING '../data/scripts/error_script' AS (tkey, tvalue)
+FROM src
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) TOK_SERDE TOK_RECORDWRITER '../data/scripts/error_script' TOK_SERDE TOK_RECORDREADER (TOK_ALIASLIST tkey tvalue))))))
 
@@ -39,8 +44,9 @@
       limit: -1
 
 
-query: SELECT TRANSFORM(src.key, src.value) USING '../data/scripts/error_script' AS (tkey, tvalue)
+PREHOOK: query: SELECT TRANSFORM(src.key, src.value) USING '../data/scripts/error_script' AS (tkey, tvalue)
 FROM src
-Input: default/src
-Output: file:/data/users/njain/hive3/hive3/build/ql/tmp/1776086048/10000
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/.ptest_3/build/ql/tmp/1662782185/10000
 FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/show_tablestatus.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/show_tablestatus.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/show_tablestatus.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/show_tablestatus.q.out Tue Sep 29 01:25:15 2009
@@ -1,3 +1,4 @@
-query: SHOW TABLE EXTENDED LIKE `srcpar*` PARTITION(ds='2008-04-08', hr=11)
+PREHOOK: query: SHOW TABLE EXTENDED LIKE `srcpar*` PARTITION(ds='2008-04-08', hr=11)
+PREHOOK: type: SHOW_TABLESTATUS
 Table srcpar* does not exist
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/union2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/union2.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/union2.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/union2.q.out Tue Sep 29 01:25:15 2009
@@ -1,5 +1,20 @@
-query: drop table t1
-query: drop table t2
-query: create table if not exists t1(r string, c string, v string)
-query: create table if not exists t2(s string, c string, v string)
+PREHOOK: query: drop table t1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table t1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@t1
+PREHOOK: query: drop table t2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table t2
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table if not exists t1(r string, c string, v string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table if not exists t1(r string, c string, v string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@t1
+PREHOOK: query: create table if not exists t2(s string, c string, v string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table if not exists t2(s string, c string, v string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@t2
 FAILED: Error in semantic analysis: Schema of both sides of union should match: Column v is of type string on first table and type double on second table

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/alter1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/alter1.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/alter1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/alter1.q.out Tue Sep 29 01:25:15 2009
@@ -1,49 +1,119 @@
-query: drop table alter1
-query: create table alter1(a int, b int)
-query: describe extended alter1
-a	int	
-b	int	
-	 	 
-Detailed Table Information	Table(tableName:alter1,dbName:default,owner:athusoo,createTime:1241277624,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{})	
-query: alter table alter1 set tblproperties ('a'='1', 'c'='3')
-query: describe extended alter1
-a	int	
-b	int	
-	 	 
-Detailed Table Information	Table(tableName:alter1,dbName:default,owner:athusoo,createTime:1241277624,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{last_modified_by=athusoo,c=3,last_modified_time=1241277624,a=1})	
-query: alter table alter1 set tblproperties ('a'='1', 'c'='4', 'd'='3')
-query: describe extended alter1
-a	int	
-b	int	
-	 	 
-Detailed Table Information	Table(tableName:alter1,dbName:default,owner:athusoo,createTime:1241277624,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=athusoo,c=4,last_modified_time=1241277625,a=1})	
-query: alter table alter1 set serdeproperties('s1'='9')
-query: describe extended alter1
-a	int	
-b	int	
-	 	 
-Detailed Table Information	Table(tableName:alter1,dbName:default,owner:athusoo,createTime:1241277624,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{s1=9,serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=athusoo,c=4,last_modified_time=1241277625,a=1})	
-query: alter table alter1 set serdeproperties('s1'='10', 's2' ='20')
-query: describe extended alter1
-a	int	
-b	int	
-	 	 
-Detailed Table Information	Table(tableName:alter1,dbName:default,owner:athusoo,createTime:1241277624,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{s2=20,s1=10,serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=athusoo,c=4,last_modified_time=1241277625,a=1})	
-query: alter table alter1 set serde 'org.apache.hadoop.hive.serde2.TestSerDe' with serdeproperties('s1'='9')
-query: describe extended alter1
+PREHOOK: query: drop table alter1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table alter1
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table alter1(a int, b int)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table alter1(a int, b int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@alter1
+PREHOOK: query: describe extended alter1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended alter1
+POSTHOOK: type: DESCTABLE
+a	int	
+b	int	
+	 	 
+Detailed Table Information	Table(tableName:alter1, dbName:default, owner:njain, createTime:1253779697, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/alter1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{})	
+PREHOOK: query: alter table alter1 set tblproperties ('a'='1', 'c'='3')
+PREHOOK: type: ALTERTABLE_PROPERTIES
+POSTHOOK: query: alter table alter1 set tblproperties ('a'='1', 'c'='3')
+POSTHOOK: type: ALTERTABLE_PROPERTIES
+POSTHOOK: Input: default@alter1
+POSTHOOK: Output: default@alter1
+PREHOOK: query: describe extended alter1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended alter1
+POSTHOOK: type: DESCTABLE
+a	int	
+b	int	
+	 	 
+Detailed Table Information	Table(tableName:alter1, dbName:default, owner:njain, createTime:1253779697, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/alter1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{last_modified_by=njain,c=3,last_modified_time=1253779698,a=1})	
+PREHOOK: query: alter table alter1 set tblproperties ('a'='1', 'c'='4', 'd'='3')
+PREHOOK: type: ALTERTABLE_PROPERTIES
+POSTHOOK: query: alter table alter1 set tblproperties ('a'='1', 'c'='4', 'd'='3')
+POSTHOOK: type: ALTERTABLE_PROPERTIES
+POSTHOOK: Input: default@alter1
+POSTHOOK: Output: default@alter1
+PREHOOK: query: describe extended alter1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended alter1
+POSTHOOK: type: DESCTABLE
+a	int	
+b	int	
+	 	 
+Detailed Table Information	Table(tableName:alter1, dbName:default, owner:njain, createTime:1253779697, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/alter1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{d=3,last_modified_by=njain,c=4,last_modified_time=1253779698,a=1})	
+PREHOOK: query: alter table alter1 set serdeproperties('s1'='9')
+PREHOOK: type: ALTERTABLE_SERDEPROPERTIES
+POSTHOOK: query: alter table alter1 set serdeproperties('s1'='9')
+POSTHOOK: type: ALTERTABLE_SERDEPROPERTIES
+POSTHOOK: Input: default@alter1
+POSTHOOK: Output: default@alter1
+PREHOOK: query: describe extended alter1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended alter1
+POSTHOOK: type: DESCTABLE
+a	int	
+b	int	
+	 	 
+Detailed Table Information	Table(tableName:alter1, dbName:default, owner:njain, createTime:1253779697, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/alter1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{s1=9,serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{d=3,last_modified_by=njain,c=4,last_modified_time=1253779698,a=1})	
+PREHOOK: query: alter table alter1 set serdeproperties('s1'='10', 's2' ='20')
+PREHOOK: type: ALTERTABLE_SERDEPROPERTIES
+POSTHOOK: query: alter table alter1 set serdeproperties('s1'='10', 's2' ='20')
+POSTHOOK: type: ALTERTABLE_SERDEPROPERTIES
+POSTHOOK: Input: default@alter1
+POSTHOOK: Output: default@alter1
+PREHOOK: query: describe extended alter1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended alter1
+POSTHOOK: type: DESCTABLE
+a	int	
+b	int	
+	 	 
+Detailed Table Information	Table(tableName:alter1, dbName:default, owner:njain, createTime:1253779697, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/alter1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{s2=20,s1=10,serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{d=3,last_modified_by=njain,c=4,last_modified_time=1253779698,a=1})	
+PREHOOK: query: alter table alter1 set serde 'org.apache.hadoop.hive.serde2.TestSerDe' with serdeproperties('s1'='9')
+PREHOOK: type: ALTERTABLE_SERIALIZER
+POSTHOOK: query: alter table alter1 set serde 'org.apache.hadoop.hive.serde2.TestSerDe' with serdeproperties('s1'='9')
+POSTHOOK: type: ALTERTABLE_SERIALIZER
+POSTHOOK: Input: default@alter1
+POSTHOOK: Output: default@alter1
+PREHOOK: query: describe extended alter1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended alter1
+POSTHOOK: type: DESCTABLE
 a	string	from deserializer
 b	string	from deserializer
 	 	 
-Detailed Table Information	Table(tableName:alter1,dbName:default,owner:athusoo,createTime:1241277624,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:string,comment:from deserializer), FieldSchema(name:b,type:string,comment:from deserializer)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.TestSerDe,parameters:{s2=20,s1=9,serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=athusoo,c=4,last_modified_time=1241277625,a=1})	
-query: alter table alter1 set serde 'org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe'
-query: describe extended alter1
+Detailed Table Information	Table(tableName:alter1, dbName:default, owner:njain, createTime:1253779697, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:string, comment:from deserializer), FieldSchema(name:b, type:string, comment:from deserializer)], location:file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/alter1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.TestSerDe, parameters:{s2=20,s1=9,serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{d=3,last_modified_by=njain,c=4,last_modified_time=1253779698,a=1})	
+PREHOOK: query: alter table alter1 set serde 'org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe'
+PREHOOK: type: ALTERTABLE_SERIALIZER
+POSTHOOK: query: alter table alter1 set serde 'org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe'
+POSTHOOK: type: ALTERTABLE_SERIALIZER
+POSTHOOK: Input: default@alter1
+POSTHOOK: Output: default@alter1
+PREHOOK: query: describe extended alter1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended alter1
+POSTHOOK: type: DESCTABLE
 a	string	from deserializer
 b	string	from deserializer
 	 	 
-Detailed Table Information	Table(tableName:alter1,dbName:default,owner:athusoo,createTime:1241277624,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:string,comment:from deserializer), FieldSchema(name:b,type:string,comment:from deserializer)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/alter1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{s2=20,s1=9,serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{d=3,last_modified_by=athusoo,c=4,last_modified_time=1241277625,a=1})	
-query: alter table alter1 replace columns (a int, b int, c string)
-query: describe alter1
+Detailed Table Information	Table(tableName:alter1, dbName:default, owner:njain, createTime:1253779697, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:string, comment:from deserializer), FieldSchema(name:b, type:string, comment:from deserializer)], location:file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/alter1, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{s2=20,s1=9,serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{d=3,last_modified_by=njain,c=4,last_modified_time=1253779698,a=1})	
+PREHOOK: query: alter table alter1 replace columns (a int, b int, c string)
+PREHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: query: alter table alter1 replace columns (a int, b int, c string)
+POSTHOOK: type: ALTERTABLE_REPLACECOLS
+POSTHOOK: Input: default@alter1
+POSTHOOK: Output: default@alter1
+PREHOOK: query: describe alter1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe alter1
+POSTHOOK: type: DESCTABLE
 a	int	
 b	int	
 c	string	
-query: drop table alter1
+PREHOOK: query: drop table alter1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table alter1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@alter1

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/alter2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/alter2.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/alter2.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/alter2.q.out Tue Sep 29 01:25:15 2009
@@ -1,57 +1,128 @@
-query: drop table alter2
-query: create table alter2(a int, b int) partitioned by (insertdate string)
-query: describe extended alter2
+PREHOOK: query: drop table alter2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table alter2
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table alter2(a int, b int) partitioned by (insertdate string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table alter2(a int, b int) partitioned by (insertdate string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@alter2
+PREHOOK: query: describe extended alter2
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended alter2
+POSTHOOK: type: DESCTABLE
 a	int	
 b	int	
 insertdate	string	
 	 	 
-Detailed Table Information	Table(tableName:alter2,dbName:default,owner:athusoo,createTime:1241277629,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/alter2,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[FieldSchema(name:insertdate,type:string,comment:null)],parameters:{})	
-query: show partitions alter2
-query: alter table alter2 add partition (insertdate='2008-01-01') location '2008/01/01'
-query: describe extended alter2
+Detailed Table Information	Table(tableName:alter2, dbName:default, owner:njain, createTime:1253779701, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/alter2, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:insertdate, type:string, comment:null)], parameters:{})	
+PREHOOK: query: show partitions alter2
+PREHOOK: type: SHOWPARTITIONS
+POSTHOOK: query: show partitions alter2
+POSTHOOK: type: SHOWPARTITIONS
+PREHOOK: query: alter table alter2 add partition (insertdate='2008-01-01') location '2008/01/01'
+PREHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: query: alter table alter2 add partition (insertdate='2008-01-01') location '2008/01/01'
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Output: default@alter2@insertdate=2008-01-01
+PREHOOK: query: describe extended alter2
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended alter2
+POSTHOOK: type: DESCTABLE
 a	int	
 b	int	
 insertdate	string	
 	 	 
-Detailed Table Information	Table(tableName:alter2,dbName:default,owner:athusoo,createTime:1241277629,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/alter2,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[FieldSchema(name:insertdate,type:string,comment:null)],parameters:{})	
-query: show partitions alter2
+Detailed Table Information	Table(tableName:alter2, dbName:default, owner:njain, createTime:1253779701, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/alter2, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:insertdate, type:string, comment:null)], parameters:{})	
+PREHOOK: query: show partitions alter2
+PREHOOK: type: SHOWPARTITIONS
+POSTHOOK: query: show partitions alter2
+POSTHOOK: type: SHOWPARTITIONS
 insertdate=2008-01-01
-query: alter table alter2 add partition (insertdate='2008-01-02') location '2008/01/02'
-query: describe extended alter2
+PREHOOK: query: alter table alter2 add partition (insertdate='2008-01-02') location '2008/01/02'
+PREHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: query: alter table alter2 add partition (insertdate='2008-01-02') location '2008/01/02'
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Output: default@alter2@insertdate=2008-01-02
+PREHOOK: query: describe extended alter2
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended alter2
+POSTHOOK: type: DESCTABLE
 a	int	
 b	int	
 insertdate	string	
 	 	 
-Detailed Table Information	Table(tableName:alter2,dbName:default,owner:athusoo,createTime:1241277629,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/alter2,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[FieldSchema(name:insertdate,type:string,comment:null)],parameters:{})	
-query: show partitions alter2
+Detailed Table Information	Table(tableName:alter2, dbName:default, owner:njain, createTime:1253779701, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/alter2, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:insertdate, type:string, comment:null)], parameters:{})	
+PREHOOK: query: show partitions alter2
+PREHOOK: type: SHOWPARTITIONS
+POSTHOOK: query: show partitions alter2
+POSTHOOK: type: SHOWPARTITIONS
 insertdate=2008-01-01
 insertdate=2008-01-02
-query: drop table alter2
-query: create external table alter2(a int, b int) partitioned by (insertdate string)
-query: describe extended alter2
+PREHOOK: query: drop table alter2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table alter2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@alter2
+PREHOOK: query: create external table alter2(a int, b int) partitioned by (insertdate string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create external table alter2(a int, b int) partitioned by (insertdate string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@alter2
+PREHOOK: query: describe extended alter2
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended alter2
+POSTHOOK: type: DESCTABLE
 a	int	
 b	int	
 insertdate	string	
 	 	 
-Detailed Table Information	Table(tableName:alter2,dbName:default,owner:athusoo,createTime:1241277629,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/alter2,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[FieldSchema(name:insertdate,type:string,comment:null)],parameters:{EXTERNAL=TRUE})	
-query: show partitions alter2
-query: alter table alter2 add partition (insertdate='2008-01-01') location '2008/01/01'
-query: describe extended alter2
+Detailed Table Information	Table(tableName:alter2, dbName:default, owner:njain, createTime:1253779702, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/alter2, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:insertdate, type:string, comment:null)], parameters:{EXTERNAL=TRUE})	
+PREHOOK: query: show partitions alter2
+PREHOOK: type: SHOWPARTITIONS
+POSTHOOK: query: show partitions alter2
+POSTHOOK: type: SHOWPARTITIONS
+PREHOOK: query: alter table alter2 add partition (insertdate='2008-01-01') location '2008/01/01'
+PREHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: query: alter table alter2 add partition (insertdate='2008-01-01') location '2008/01/01'
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Output: default@alter2@insertdate=2008-01-01
+PREHOOK: query: describe extended alter2
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended alter2
+POSTHOOK: type: DESCTABLE
 a	int	
 b	int	
 insertdate	string	
 	 	 
-Detailed Table Information	Table(tableName:alter2,dbName:default,owner:athusoo,createTime:1241277629,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/alter2,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[FieldSchema(name:insertdate,type:string,comment:null)],parameters:{EXTERNAL=TRUE})	
-query: show partitions alter2
+Detailed Table Information	Table(tableName:alter2, dbName:default, owner:njain, createTime:1253779702, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/alter2, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:insertdate, type:string, comment:null)], parameters:{EXTERNAL=TRUE})	
+PREHOOK: query: show partitions alter2
+PREHOOK: type: SHOWPARTITIONS
+POSTHOOK: query: show partitions alter2
+POSTHOOK: type: SHOWPARTITIONS
 insertdate=2008-01-01
-query: alter table alter2 add partition (insertdate='2008-01-02') location '2008/01/02'
-query: describe extended alter2
+PREHOOK: query: alter table alter2 add partition (insertdate='2008-01-02') location '2008/01/02'
+PREHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: query: alter table alter2 add partition (insertdate='2008-01-02') location '2008/01/02'
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Output: default@alter2@insertdate=2008-01-02
+PREHOOK: query: describe extended alter2
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended alter2
+POSTHOOK: type: DESCTABLE
 a	int	
 b	int	
 insertdate	string	
 	 	 
-Detailed Table Information	Table(tableName:alter2,dbName:default,owner:athusoo,createTime:1241277629,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:int,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/data/users/athusoo/commits/hive_trunk_ws8/build/ql/test/data/warehouse/alter2,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[FieldSchema(name:insertdate,type:string,comment:null)],parameters:{EXTERNAL=TRUE})	
-query: show partitions alter2
+Detailed Table Information	Table(tableName:alter2, dbName:default, owner:njain, createTime:1253779702, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:a, type:int, comment:null), FieldSchema(name:b, type:int, comment:null)], location:file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/alter2, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:insertdate, type:string, comment:null)], parameters:{EXTERNAL=TRUE})	
+PREHOOK: query: show partitions alter2
+PREHOOK: type: SHOWPARTITIONS
+POSTHOOK: query: show partitions alter2
+POSTHOOK: type: SHOWPARTITIONS
 insertdate=2008-01-01
 insertdate=2008-01-02
-query: drop table alter2
+PREHOOK: query: drop table alter2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table alter2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@alter2

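The alter2.q.out hunks also show the new entity naming the hooks print: tables appear as "default@alter2" and partitions as "default@alter2@insertdate=2008-01-01", replacing the older path-style "default/alter2/..." form. A small illustrative formatter that reproduces the convention as seen in the output; it is a hypothetical helper, not the ReadEntity/WriteEntity code from this commit.

import java.util.Map;

public final class EntityName {
  private EntityName() {}

  // Builds "db@table" for a table and "db@table@k1=v1/k2=v2" for a
  // partition, matching the PREHOOK/POSTHOOK Input/Output lines above.
  public static String format(String db, String table,
                              Map<String, String> partSpec) {
    StringBuilder sb = new StringBuilder(db).append('@').append(table);
    if (partSpec != null && !partSpec.isEmpty()) {
      sb.append('@');
      boolean first = true;
      for (Map.Entry<String, String> e : partSpec.entrySet()) {
        if (!first) {
          sb.append('/');
        }
        sb.append(e.getKey()).append('=').append(e.getValue());
        first = false;
      }
    }
    return sb.toString();
  }
}

For example, format("default", "alter2", java.util.Collections.singletonMap("insertdate", "2008-01-01")) yields "default@alter2@insertdate=2008-01-01"; with an ordered two-key map (pcol1, pcol2) it yields the multi-level form "default@alter3@pcol1=test_part/pcol2=test_part" seen in the alter3 hunks below.
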
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/alter3.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/alter3.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/alter3.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/alter3.q.out Tue Sep 29 01:25:15 2009
@@ -1,42 +1,97 @@
-query: drop table alter3_src
-query: drop table alter3
-query: create table alter3_src ( col1 string ) stored as textfile
-query: load data local inpath '../data/files/test.dat' overwrite into table alter3_src
-query: create table alter3 ( col1 string ) partitioned by (pcol1 string , pcol2 string) stored as sequencefile
-query: insert overwrite table alter3 partition (pCol1='test_part', pcol2='test_part') select col1 from alter3_src
-Input: default/alter3_src
-Output: default/alter3/pcol1=test_part/pcol2=test_part
-query: select * from alter3 where pcol1='test_part' and pcol2='test_part'
-Input: default/alter3/pcol1=test_part/pcol2=test_part
-Output: file:/data/users/pchakka/workspace/oshive2/build/ql/tmp/1140751313/10000
+PREHOOK: query: drop table alter3_src
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table alter3_src
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: drop table alter3
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table alter3
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table alter3_src ( col1 string ) stored as textfile
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table alter3_src ( col1 string ) stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@alter3_src
+PREHOOK: query: load data local inpath '../data/files/test.dat' overwrite into table alter3_src
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath '../data/files/test.dat' overwrite into table alter3_src
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@alter3_src
+PREHOOK: query: create table alter3 ( col1 string ) partitioned by (pcol1 string , pcol2 string) stored as sequencefile
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table alter3 ( col1 string ) partitioned by (pcol1 string , pcol2 string) stored as sequencefile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@alter3
+PREHOOK: query: insert overwrite table alter3 partition (pCol1='test_part', pcol2='test_part') select col1 from alter3_src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alter3_src
+PREHOOK: Output: default@alter3@pcol1=test_part/pcol2=test_part
+POSTHOOK: query: insert overwrite table alter3 partition (pCol1='test_part', pcol2='test_part') select col1 from alter3_src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alter3_src
+POSTHOOK: Output: default@alter3@pcol1=test_part/pcol2=test_part
+PREHOOK: query: select * from alter3 where pcol1='test_part' and pcol2='test_part'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alter3@pcol1=test_part/pcol2=test_part
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1483627653/10000
+POSTHOOK: query: select * from alter3 where pcol1='test_part' and pcol2='test_part'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alter3@pcol1=test_part/pcol2=test_part
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1483627653/10000
 1	test_part	test_part
 2	test_part	test_part
 3	test_part	test_part
 4	test_part	test_part
 5	test_part	test_part
 6	test_part	test_part
-query: alter table alter3 rename to alter3_renamed
-query: describe extended alter3_renamed
+PREHOOK: query: alter table alter3 rename to alter3_renamed
+PREHOOK: type: ALTERTABLE_RENAME
+POSTHOOK: query: alter table alter3 rename to alter3_renamed
+POSTHOOK: type: ALTERTABLE_RENAME
+POSTHOOK: Input: default@alter3
+POSTHOOK: Output: default@alter3_renamed
+PREHOOK: query: describe extended alter3_renamed
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended alter3_renamed
+POSTHOOK: type: DESCTABLE
 col1	string	
 pcol1	string	
 pcol2	string	
 	 	 
-Detailed Table Information	Table(tableName:alter3_renamed, dbName:default, owner:pchakka, createTime:1247544316, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:col1, type:string, comment:null)], location:file:/data/users/pchakka/workspace/oshive2/build/ql/test/data/warehouse/alter3_renamed, inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:pcol1, type:string, comment:null), FieldSchema(name:pcol2, type:string, comment:null)], parameters:{last_modified_by=pchakka,last_modified_time=1247544320})	
-query: describe extended alter3_renamed partition (pCol1='test_part', pcol2='test_part')
+Detailed Table Information	Table(tableName:alter3_renamed, dbName:default, owner:njain, createTime:1253779705, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:col1, type:string, comment:null)], location:file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/alter3_renamed, inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:pcol1, type:string, comment:null), FieldSchema(name:pcol2, type:string, comment:null)], parameters:{last_modified_by=njain,last_modified_time=1253779709})	
+PREHOOK: query: describe extended alter3_renamed partition (pCol1='test_part', pcol2='test_part')
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended alter3_renamed partition (pCol1='test_part', pcol2='test_part')
+POSTHOOK: type: DESCTABLE
 col1	string	
 pcol1	string	
 pcol2	string	
 	 	 
-Detailed Partition Information	Partition(values:[test_part, test_part], dbName:default, tableName:alter3_renamed, createTime:0, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:col1, type:string, comment:null)], location:file:/data/users/pchakka/workspace/oshive2/build/ql/test/data/warehouse/alter3_renamed/pcol1=test_part/pcol2=test_part, inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{})	
-query: select * from alter3_renamed where pcol1='test_part' and pcol2='test_part'
-Input: default/alter3_renamed/pcol1=test_part/pcol2=test_part
-Output: file:/data/users/pchakka/workspace/oshive2/build/ql/tmp/862867654/10000
+Detailed Partition Information	Partition(values:[test_part, test_part], dbName:default, tableName:alter3_renamed, createTime:0, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:col1, type:string, comment:null)], location:file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/alter3_renamed/pcol1=test_part/pcol2=test_part, inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{})	
+PREHOOK: query: select * from alter3_renamed where pcol1='test_part' and pcol2='test_part'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alter3_renamed@pcol1=test_part/pcol2=test_part
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/250127671/10000
+POSTHOOK: query: select * from alter3_renamed where pcol1='test_part' and pcol2='test_part'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alter3_renamed@pcol1=test_part/pcol2=test_part
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/250127671/10000
 1	test_part	test_part
 2	test_part	test_part
 3	test_part	test_part
 4	test_part	test_part
 5	test_part	test_part
 6	test_part	test_part
-query: drop table alter3_src
-query: drop table alter3
-query: drop table alter3_renamed
+PREHOOK: query: drop table alter3_src
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table alter3_src
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@alter3_src
+PREHOOK: query: drop table alter3
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table alter3
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: drop table alter3_renamed
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table alter3_renamed
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@alter3_renamed

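Every statement in these expected files now carries a PREHOOK/POSTHOOK pair, which implies the test configuration registers the printer classes and the driver instantiates them by name around each query. A generic sketch of that kind of reflective loading, under the assumption of comma-separated class-list properties along the lines of hive.exec.pre.hooks; the property name and mechanism are assumptions here, not the Driver code from this commit.

import java.util.ArrayList;
import java.util.List;

// Generic sketch: load hook instances from a comma-separated class
// list, the way a driver might pick up PreExecutePrinter and
// PostExecutePrinter from a test configuration.
public final class HookLoader {
  private HookLoader() {}

  public static <T> List<T> load(String classList, Class<T> type)
      throws Exception {
    List<T> hooks = new ArrayList<T>();
    if (classList == null || classList.trim().isEmpty()) {
      return hooks;
    }
    for (String name : classList.split(",")) {
      // each entry names a class with a public no-arg constructor
      Class<?> cls = Class.forName(name.trim());
      hooks.add(type.cast(cls.newInstance()));
    }
    return hooks;
  }
}

Under that assumption, the pre-hooks would run after the query is compiled but before execution (printing the PREHOOK lines) and the post-hooks after execution succeeds (printing the POSTHOOK lines, including the resolved Input/Output entities).
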
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/binarysortable_1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/binarysortable_1.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
Files hadoop/hive/trunk/ql/src/test/results/clientpositive/binarysortable_1.q.out (original) and hadoop/hive/trunk/ql/src/test/results/clientpositive/binarysortable_1.q.out Tue Sep 29 01:25:15 2009 differ

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/case_sensitivity.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/case_sensitivity.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/case_sensitivity.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/case_sensitivity.q.out Tue Sep 29 01:25:15 2009
@@ -1,7 +1,16 @@
-query: CREATE TABLE DEST1(Key INT, VALUE STRING) STORED AS TEXTFILE
-query: EXPLAIN
+PREHOOK: query: CREATE TABLE DEST1(Key INT, VALUE STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE DEST1(Key INT, VALUE STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@DEST1
+PREHOOK: query: EXPLAIN
 FROM SRC_THRIFT
 INSERT OVERWRITE TABLE dest1 SELECT src_Thrift.LINT[1], src_thrift.lintstring[0].MYSTRING where src_thrift.liNT[0] > 0
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+FROM SRC_THRIFT
+INSERT OVERWRITE TABLE dest1 SELECT src_Thrift.LINT[1], src_thrift.lintstring[0].MYSTRING where src_thrift.liNT[0] > 0
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF SRC_THRIFT)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR ([ (. (TOK_TABLE_OR_COL src_Thrift) LINT) 1)) (TOK_SELEXPR (. ([ (. (TOK_TABLE_OR_COL src_thrift) lintstring) 0) MYSTRING))) (TOK_WHERE (> ([ (. (TOK_TABLE_OR_COL src_thrift) liNT) 0) 0))))
 
@@ -47,10 +56,10 @@
           Move Operator
             files:
                 hdfs directory: true
-                destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/150742864/10000
+                destination: file:/data/users/njain/hive5/hive5/build/ql/tmp/596729572/10000
           Map Reduce
             Alias -> Map Operator Tree:
-              file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2122837325/10002 
+              file:/data/users/njain/hive5/hive5/build/ql/tmp/790017029/10002 
                   Reduce Output Operator
                     sort order: 
                     Map-reduce partition columns:
@@ -84,13 +93,24 @@
               name: dest1
 
 
-query: FROM SRC_THRIFT
+PREHOOK: query: FROM SRC_THRIFT
+INSERT OVERWRITE TABLE dest1 SELECT src_Thrift.LINT[1], src_thrift.lintstring[0].MYSTRING where src_thrift.liNT[0] > 0
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src_thrift
+PREHOOK: Output: default@dest1
+POSTHOOK: query: FROM SRC_THRIFT
 INSERT OVERWRITE TABLE dest1 SELECT src_Thrift.LINT[1], src_thrift.lintstring[0].MYSTRING where src_thrift.liNT[0] > 0
-Input: default/src_thrift
-Output: default/dest1
-query: SELECT DEST1.* FROM Dest1
-Input: default/dest1
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1814455060/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src_thrift
+POSTHOOK: Output: default@dest1
+PREHOOK: query: SELECT DEST1.* FROM Dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/423054543/10000
+POSTHOOK: query: SELECT DEST1.* FROM Dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/423054543/10000
 2	1
 4	8
 6	27

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/cast1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/cast1.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/cast1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/cast1.q.out Tue Sep 29 01:25:15 2009
@@ -1,6 +1,14 @@
-query: CREATE TABLE dest1(c1 INT, c2 DOUBLE, c3 DOUBLE, c4 DOUBLE, c5 INT, c6 STRING, c7 INT) STORED AS TEXTFILE
-query: EXPLAIN
+PREHOOK: query: CREATE TABLE dest1(c1 INT, c2 DOUBLE, c3 DOUBLE, c4 DOUBLE, c5 INT, c6 STRING, c7 INT) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest1(c1 INT, c2 DOUBLE, c3 DOUBLE, c4 DOUBLE, c5 INT, c6 STRING, c7 INT) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
+PREHOOK: query: EXPLAIN
 FROM src INSERT OVERWRITE TABLE dest1 SELECT 3 + 2, 3.0 + 2, 3 + 2.0, 3.0 + 2.0, 3 + CAST(2.0 AS INT) + CAST(CAST(0 AS SMALLINT) AS INT), CAST(1 AS BOOLEAN), CAST(TRUE AS INT) WHERE src.key = 86
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+FROM src INSERT OVERWRITE TABLE dest1 SELECT 3 + 2, 3.0 + 2, 3 + 2.0, 3.0 + 2.0, 3 + CAST(2.0 AS INT) + CAST(CAST(0 AS SMALLINT) AS INT), CAST(1 AS BOOLEAN), CAST(TRUE AS INT) WHERE src.key = 86
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (+ 3 2)) (TOK_SELEXPR (+ 3.0 2)) (TOK_SELEXPR (+ 3 2.0)) (TOK_SELEXPR (+ 3.0 2.0)) (TOK_SELEXPR (+ (+ 3 (TOK_FUNCTION TOK_INT 2.0)) (TOK_FUNCTION TOK_INT (TOK_FUNCTION TOK_SMALLINT 0)))) (TOK_SELEXPR (TOK_FUNCTION TOK_BOOLEAN 1)) (TOK_SELEXPR (TOK_FUNCTION TOK_INT TRUE))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL src) key) 86))))
 
@@ -56,10 +64,10 @@
           Move Operator
             files:
                 hdfs directory: true
-                destination: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_2/build/ql/tmp/1859655774/10000
+                destination: file:/data/users/njain/hive5/hive5/build/ql/tmp/1071125214/10000
           Map Reduce
             Alias -> Map Operator Tree:
-              file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_2/build/ql/tmp/599057975/10002 
+              file:/data/users/njain/hive5/hive5/build/ql/tmp/517302853/10002 
                   Reduce Output Operator
                     sort order: 
                     Map-reduce partition columns:
@@ -103,10 +111,20 @@
               name: dest1
 
 
-query: FROM src INSERT OVERWRITE TABLE dest1 SELECT 3 + 2, 3.0 + 2, 3 + 2.0, 3.0 + 2.0, 3 + CAST(2.0 AS INT) + CAST(CAST(0 AS SMALLINT) AS INT), CAST(1 AS BOOLEAN), CAST(TRUE AS INT) WHERE src.key = 86
-Input: default/src
-Output: default/dest1
-query: select dest1.* FROM dest1
-Input: default/dest1
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_2/build/ql/tmp/1345350120/10000
+PREHOOK: query: FROM src INSERT OVERWRITE TABLE dest1 SELECT 3 + 2, 3.0 + 2, 3 + 2.0, 3.0 + 2.0, 3 + CAST(2.0 AS INT) + CAST(CAST(0 AS SMALLINT) AS INT), CAST(1 AS BOOLEAN), CAST(TRUE AS INT) WHERE src.key = 86
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest1
+POSTHOOK: query: FROM src INSERT OVERWRITE TABLE dest1 SELECT 3 + 2, 3.0 + 2, 3 + 2.0, 3.0 + 2.0, 3 + CAST(2.0 AS INT) + CAST(CAST(0 AS SMALLINT) AS INT), CAST(1 AS BOOLEAN), CAST(TRUE AS INT) WHERE src.key = 86
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest1
+PREHOOK: query: select dest1.* FROM dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/822287214/10000
+POSTHOOK: query: select dest1.* FROM dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/822287214/10000
 5	5.0	5.0	5.0	5	true	1