Posted to commits@hive.apache.org by nz...@apache.org on 2010/07/23 01:57:42 UTC

svn commit: r966909 [2/27] - in /hadoop/hive/trunk: ./ contrib/ data/warehouse/src/ hbase-handler/ hwi/ jdbc/ odbc/ ql/ ql/src/test/org/apache/hadoop/hive/ql/ ql/src/test/queries/clientnegative/ ql/src/test/queries/clientpositive/ ql/src/test/results/c...

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_insert_outputformat.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_insert_outputformat.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_insert_outputformat.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_insert_outputformat.q Thu Jul 22 23:57:29 2010
@@ -1,4 +1,4 @@
-DROP TABLE table_test_output_format;
+
 
 CREATE TABLE table_test_output_format(key INT, value STRING) STORED AS
   INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat'
@@ -7,9 +7,9 @@ CREATE TABLE table_test_output_format(ke
 FROM src
 INSERT OVERWRITE TABLE table_test_output_format SELECT src.key, src.value LIMIT 10;
 describe table_test_output_format;
-DROP TABLE table_test_output_format;
 
-DROP TABLE table_test_output_format_sequencefile;
+
+
 CREATE TABLE table_test_output_format_sequencefile(key INT, value STRING) STORED AS
   INPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileInputFormat'
   OUTPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileOutputFormat';
@@ -17,9 +17,9 @@ CREATE TABLE table_test_output_format_se
 FROM src
 INSERT OVERWRITE TABLE table_test_output_format_sequencefile SELECT src.key, src.value LIMIT 10;
 describe table_test_output_format_sequencefile;
-DROP TABLE table_test_output_format_sequencefile;
 
-DROP TABLE table_test_output_format_hivesequencefile;
+
+
 CREATE TABLE table_test_output_format_hivesequencefile(key INT, value STRING) STORED AS
   INPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileInputFormat'
   OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat';
@@ -27,4 +27,4 @@ CREATE TABLE table_test_output_format_hi
 FROM src
 INSERT OVERWRITE TABLE table_test_output_format_hivesequencefile SELECT src.key, src.value LIMIT 10;
 describe table_test_output_format_hivesequencefile;
-DROP TABLE table_test_output_format_hivesequencefile;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_like.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_like.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_like.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_like.q Thu Jul 22 23:57:29 2010
@@ -1,6 +1,6 @@
-DROP TABLE table1;
-DROP TABLE table2;
-DROP TABLE table3;
+
+
+
 
 CREATE TABLE table1 (a STRING, b STRING) STORED AS TEXTFILE;
 DESCRIBE table1;
@@ -24,6 +24,6 @@ INSERT OVERWRITE TABLE table2 SELECT key
 SELECT * FROM table1;
 SELECT * FROM table2;
 
-DROP TABLE table1;
-DROP TABLE table2;
-DROP TABLE table3;
+
+
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_nested_type.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_nested_type.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_nested_type.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_nested_type.q Thu Jul 22 23:57:29 2010
@@ -1,4 +1,4 @@
-DROP TABLE table1;
+
 
 CREATE TABLE table1 (
        a STRING,
@@ -13,4 +13,4 @@ LOAD DATA LOCAL INPATH '../data/files/cr
 
 SELECT * from table1;
 
-DROP TABLE table1;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_struct_table.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_struct_table.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_struct_table.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_struct_table.q Thu Jul 22 23:57:29 2010
@@ -1,4 +1,4 @@
-drop table abc;
+
 create table abc(strct struct<a:int, b:string, c:string>)
 row format delimited
   fields terminated by '\t'
@@ -9,4 +9,4 @@ overwrite into table abc;
 
 SELECT strct, strct.a, strct.b FROM abc LIMIT 10;
 
-drop table abc;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_view.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_view.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_view.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_view.q Thu Jul 22 23:57:29 2010
@@ -17,7 +17,7 @@ DROP VIEW view16;
 DROP TEMPORARY FUNCTION test_translate;
 DROP TEMPORARY FUNCTION test_max;
 DROP TEMPORARY FUNCTION test_explode;
-DROP TABLE table1;
+
 
 SELECT * FROM src WHERE key=86;
 CREATE VIEW view1 AS SELECT value FROM src WHERE key=86;
@@ -192,7 +192,7 @@ LIMIT 10;
 
 -- this should work since currently we don't track view->table
 -- dependencies for implementing RESTRICT
-DROP TABLE table1;
+
 
 DROP VIEW view1;
 DROP VIEW view2;

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/ct_case_insensitive.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/ct_case_insensitive.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/ct_case_insensitive.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/ct_case_insensitive.q Thu Jul 22 23:57:29 2010
@@ -1,5 +1,3 @@
-DROP TABLE tmp_pyang_bucket3;
 CREATE TABLE tmp_pyang_bucket3 (userId INT) CLUSTERED BY (userid) INTO 32 BUCKETS;
 DROP TABLE tmp_pyang_bucket3;
 CREATE TABLE tmp_pyang_bucket3 (userId INT) CLUSTERED BY (userid) SORTED BY (USERID) INTO 32 BUCKETS;
-DROP TABLE tmp_pyang_bucket3;

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/ctas.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/ctas.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/ctas.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/ctas.q Thu Jul 22 23:57:29 2010
@@ -1,10 +1,10 @@
-drop table nzhang_ctas1;
-drop table nzhang_ctas2;
-drop table nzhang_ctas3;
-drop table nzhang_ctas4;
-drop table nzhang_ctas5;
-drop table nzhang_ctas6;
-drop table nzhang_ctas7;
+
+
+
+
+
+
+
 
 create table nzhang_Tmp(a int, b string);
 select * from nzhang_Tmp;
@@ -51,11 +51,11 @@ create table nzhang_ctas6 (key string, `
 insert overwrite table nzhang_ctas6 select key, value from src limit 10;
 create table nzhang_ctas7 as select key, `to` from nzhang_ctas6;
 
-drop table nzhang_ctas1;
-drop table nzhang_ctas2;
-drop table nzhang_ctas3;
-drop table nzhang_ctas4;
-drop table nzhang_ctas5;
-drop table nzhang_ctas6;
-drop table nzhang_ctas7;
-drop table nzhang_Tmp;
+
+
+
+
+
+
+
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/ddltime.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/ddltime.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/ddltime.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/ddltime.q Thu Jul 22 23:57:29 2010
@@ -19,7 +19,7 @@ insert overwrite table T1 select * from 
 
 desc extended T1;
 
-drop table T1;
+
 
 create table if not exists T2 like srcpart;
 desc extended T2;
@@ -42,4 +42,4 @@ insert overwrite table T2 partition (ds=
 
 desc extended T2 partition(ds='2010-06-01', hr='1');
 
-drop table T2;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/diff_part_input_formats.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/diff_part_input_formats.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/diff_part_input_formats.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/diff_part_input_formats.q Thu Jul 22 23:57:29 2010
@@ -6,4 +6,4 @@ ALTER TABLE part_test ADD PARTITION(ds='
 ALTER TABLE part_test SET FILEFORMAT RCFILE;
 ALTER TABLE part_test ADD PARTITION(ds='2');
 SELECT count(1) FROM part_test WHERE ds='3';
-DROP TABLE part_test;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/disable_file_format_check.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/disable_file_format_check.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/disable_file_format_check.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/disable_file_format_check.q Thu Jul 22 23:57:29 2010
@@ -5,5 +5,5 @@ load data local inpath '../data/files/kv
 create table kv_fileformat_check_seq (key string, value string) stored as sequencefile;
 load data local inpath '../data/files/kv1.txt' overwrite into table kv_fileformat_check_seq;
 
-drop table kv_fileformat_check_seq;
-drop table kv_fileformat_check_txt;
+
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/disable_merge_for_bucketing.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/disable_merge_for_bucketing.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/disable_merge_for_bucketing.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/disable_merge_for_bucketing.q Thu Jul 22 23:57:29 2010
@@ -2,7 +2,7 @@ set hive.enforce.bucketing = true;
 set hive.exec.reducers.max = 1;
 set hive.merge.mapredfiles=true;
 
-drop table bucket2_1;
+
 CREATE TABLE bucket2_1(key int, value string) CLUSTERED BY (key) INTO 2 BUCKETS;
 
 explain extended
@@ -18,4 +18,3 @@ select * from bucket2_1 tablesample (buc
 select * from bucket2_1 tablesample (bucket 1 out of 2) s order by key;
 
 
-drop table bucket2_1;
\ No newline at end of file

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/drop_multi_partitions.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/drop_multi_partitions.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/drop_multi_partitions.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/drop_multi_partitions.q Thu Jul 22 23:57:29 2010
@@ -11,5 +11,5 @@ alter table mp drop partition (b='1');
 
 show partitions mp;
 
-drop table mp;
+
 

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/fileformat_mix.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/fileformat_mix.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/fileformat_mix.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/fileformat_mix.q Thu Jul 22 23:57:29 2010
@@ -1,4 +1,4 @@
-drop table fileformat_mix_test;
+
 
 create table fileformat_mix_test (src int, value string) partitioned by (ds string);
 alter table fileformat_mix_test set fileformat Sequencefile;
@@ -14,4 +14,3 @@ select count(1) from fileformat_mix_test
 
 select src from fileformat_mix_test;
 
-drop table fileformat_mix_test;
\ No newline at end of file

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/fileformat_sequencefile.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/fileformat_sequencefile.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/fileformat_sequencefile.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/fileformat_sequencefile.q Thu Jul 22 23:57:29 2010
@@ -14,4 +14,4 @@ INSERT OVERWRITE TABLE dest1 SELECT src.
 
 SELECT dest1.* FROM dest1;
 
-DROP TABLE dest1;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/fileformat_text.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/fileformat_text.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/fileformat_text.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/fileformat_text.q Thu Jul 22 23:57:29 2010
@@ -14,4 +14,4 @@ INSERT OVERWRITE TABLE dest1 SELECT src.
 
 SELECT dest1.* FROM dest1;
 
-DROP TABLE dest1;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/filter_join_breaktask.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/filter_join_breaktask.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/filter_join_breaktask.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/filter_join_breaktask.q Thu Jul 22 23:57:29 2010
@@ -1,4 +1,4 @@
-DROP TABLE filter_join_breaktask;
+
 CREATE TABLE filter_join_breaktask(key int, value string) partitioned by (ds string);
 
 INSERT OVERWRITE TABLE filter_join_breaktask PARTITION(ds='2008-04-08')
@@ -14,4 +14,3 @@ SELECT f.key, g.value 
 FROM filter_join_breaktask f JOIN filter_join_breaktask m ON( f.key = m.key AND f.ds='2008-04-08' AND m.ds='2008-04-08' AND f.key is not null) 
 JOIN filter_join_breaktask g ON(g.value = m.value AND g.ds='2008-04-08' AND m.ds='2008-04-08' AND m.value is not null AND m.value !='');
 
-DROP TABLE filter_join_breaktask;
\ No newline at end of file

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/filter_join_breaktask2.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/filter_join_breaktask2.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/filter_join_breaktask2.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/filter_join_breaktask2.q Thu Jul 22 23:57:29 2010
@@ -1,7 +1,7 @@
-drop table T1;
-drop table T2;
-drop table T3;
-drop table T4;
+
+
+
+
 
 create table T1(c1 string, c2 string, c3 string, c4 string, c5 string, c6 string, c7 string) 
 partitioned by (ds string);
@@ -34,7 +34,7 @@ FROM T1 a JOIN T2 b 
      JOIN T4 d 
        ON (c.c0 = d.c0 AND c.ds='2010-04-17' AND d.ds='2010-04-17');
 
-drop table T1;
-drop table T2;
-drop table T3;
-drop table T4;
+
+
+
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby10.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby10.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby10.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby10.q Thu Jul 22 23:57:29 2010
@@ -1,9 +1,9 @@
 set hive.map.aggr=false;
 set hive.groupby.skewindata=true;
 
-drop table dest1;
-drop table dest2;
-drop table INPUT;
+
+
+
 
 CREATE TABLE dest1(key INT, val1 INT, val2 INT);
 CREATE TABLE dest2(key INT, val1 INT, val2 INT);
@@ -23,6 +23,6 @@ INSERT OVERWRITE TABLE dest2 SELECT INPU
 SELECT * from dest1;
 SELECT * from dest2;
 
-drop table INPUT;
-drop table dest1;
-drop table dest2;
+
+
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby11.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby11.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby11.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby11.q Thu Jul 22 23:57:29 2010
@@ -1,8 +1,8 @@
 set hive.map.aggr=false;
 set hive.groupby.skewindata=true;
 
-drop table dest1;
-drop table dest2;
+
+
 
 CREATE TABLE dest1(key STRING, val1 INT, val2 INT) partitioned by (ds string);
 CREATE TABLE dest2(key STRING, val1 INT, val2 INT) partitioned by (ds string);
@@ -23,5 +23,5 @@ INSERT OVERWRITE TABLE dest2  partition(
 SELECT * from dest1;
 SELECT * from dest2;
 
-drop table dest1;
-drop table dest2;
+
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby3.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby3.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby3.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby3.q Thu Jul 22 23:57:29 2010
@@ -30,4 +30,4 @@ INSERT OVERWRITE TABLE dest1 SELECT 
 
 SELECT dest1.* FROM dest1;
 
-DROP TABLE dest1;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby3_map.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby3_map.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby3_map.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby3_map.q Thu Jul 22 23:57:29 2010
@@ -31,4 +31,4 @@ INSERT OVERWRITE TABLE dest1 SELECT
 
 SELECT dest1.* FROM dest1;
 
-DROP TABLE dest1;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby3_map_skew.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby3_map_skew.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby3_map_skew.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby3_map_skew.q Thu Jul 22 23:57:29 2010
@@ -31,4 +31,4 @@ INSERT OVERWRITE TABLE dest1 SELECT
 
 SELECT dest1.* FROM dest1;
 
-DROP TABLE dest1;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby3_noskew.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby3_noskew.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby3_noskew.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby3_noskew.q Thu Jul 22 23:57:29 2010
@@ -32,5 +32,5 @@ INSERT OVERWRITE TABLE dest1 SELECT
 
 SELECT dest1.* FROM dest1;
 
-DROP TABLE dest1;
+
 

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby9.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby9.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby9.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby9.q Thu Jul 22 23:57:29 2010
@@ -1,5 +1,5 @@
-drop table DEST1;
-drop table DEST2;
+
+
 
 CREATE TABLE DEST1(key INT, value STRING) STORED AS TEXTFILE;
 CREATE TABLE DEST2(key INT, val1 STRING, val2 STRING) STORED AS TEXTFILE;
@@ -16,5 +16,5 @@ INSERT OVERWRITE TABLE DEST2 SELECT SRC.
 SELECT DEST1.* FROM DEST1;
 SELECT DEST2.* FROM DEST2;
 
-drop table DEST1;
-drop table DEST2;
+
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/implicit_cast1.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/implicit_cast1.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/implicit_cast1.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/implicit_cast1.q Thu Jul 22 23:57:29 2010
@@ -9,5 +9,5 @@ SELECT implicit_test1.*
 FROM implicit_test1
 WHERE implicit_test1.a <> 0;
 
-DROP TABLE implicit_test1;
+
 

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/init_file.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/init_file.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/init_file.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/init_file.q Thu Jul 22 23:57:29 2010
@@ -2,4 +2,4 @@
 -- automatically by test_init_file.sql
 
 select * from tbl_created_by_init;
-drop table tbl_created_by_init;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input1.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input1.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input1.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input1.q Thu Jul 22 23:57:29 2010
@@ -5,5 +5,5 @@ DESCRIBE TEST1; 
 
 DESCRIBE TEST1; 
 
-DROP TABLE TEST1;
+
 

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input10.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input10.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input10.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input10.q Thu Jul 22 23:57:29 2010
@@ -5,5 +5,5 @@ DESCRIBE TEST10;
 
 DESCRIBE TEST10;
 
-DROP TABLE TEST10;
+
 

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input15.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input15.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input15.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input15.q Thu Jul 22 23:57:29 2010
@@ -5,5 +5,3 @@ CREATE TABLE TEST15(key INT, value STRIN
 
 DESCRIBE TEST15;
 
-DROP TABLE TEST15;
-

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input16.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input16.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input16.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input16.q Thu Jul 22 23:57:29 2010
@@ -4,4 +4,3 @@ ADD JAR ../data/files/TestSerDe.jar;
 CREATE TABLE INPUT16(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' STORED AS TEXTFILE;
 LOAD DATA LOCAL INPATH '../data/files/kv1_cb.txt' INTO TABLE INPUT16;
 SELECT INPUT16.VALUE, INPUT16.KEY FROM INPUT16;
-DROP TABLE INPUT16;

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input16_cc.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input16_cc.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input16_cc.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input16_cc.q Thu Jul 22 23:57:29 2010
@@ -5,4 +5,4 @@ ADD JAR ../data/files/TestSerDe.jar;
 CREATE TABLE INPUT16_CC(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe'  with serdeproperties ('testserde.default.serialization.format'='\003', 'dummy.prop.not.used'='dummyy.val') STORED AS TEXTFILE;
 LOAD DATA LOCAL INPATH '../data/files/kv1_cc.txt' INTO TABLE INPUT16_CC;
 SELECT INPUT16_CC.VALUE, INPUT16_CC.KEY FROM INPUT16_CC;
-DROP TABLE INPUT16_CC;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input19.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input19.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input19.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input19.q Thu Jul 22 23:57:29 2010
@@ -1,5 +1,5 @@
-drop table apachelog;
+
 create table apachelog(ipaddress STRING,identd STRING,user STRING,finishtime STRING,requestline string,returncode INT,size INT) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe' WITH SERDEPROPERTIES (  'serialization.format'= 'org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol',  'quote.delim'= '("|\\[|\\])',  'field.delim'=' ',  'serialization.null.format'='-'  ) STORED AS TEXTFILE;
 LOAD DATA LOCAL INPATH '../data/files/apache.access.log' INTO TABLE apachelog;
 SELECT a.* FROM apachelog a;
-drop table apachelog;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input1_limit.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input1_limit.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input1_limit.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input1_limit.q Thu Jul 22 23:57:29 2010
@@ -13,6 +13,6 @@ INSERT OVERWRITE TABLE dest2 SELECT src.
 SELECT dest1.* FROM dest1;
 SELECT dest2.* FROM dest2;
 
-DROP TABLE dest1;
-DROP TABLE dest2;
+
+
 

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input2.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input2.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input2.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input2.q Thu Jul 22 23:57:29 2010
@@ -1,6 +1,3 @@
-DROP TABLE TEST2a;
-DROP TABLE TEST2b;
-
 CREATE TABLE TEST2a(A INT, B DOUBLE) STORED AS TEXTFILE;
 DESCRIBE TEST2a;
 DESC TEST2a;
@@ -13,6 +10,3 @@ DROP TABLE TEST2b;
 
 EXPLAIN
 SHOW TABLES;
-
-DROP TABLE TEST2a;
-DROP TABLE TEST2b;

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input21.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input21.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input21.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input21.q Thu Jul 22 23:57:29 2010
@@ -1,4 +1,4 @@
-DROP TABLE src_null;
+
 
 CREATE TABLE src_null(a STRING, b STRING, c STRING, d STRING) STORED AS TEXTFILE;
 LOAD DATA LOCAL INPATH '../data/files/null.txt' INTO TABLE src_null;
@@ -7,4 +7,4 @@ EXPLAIN SELECT * FROM src_null DISTRIBUT
 
 SELECT * FROM src_null DISTRIBUTE BY c SORT BY d;
 
-DROP TABLE src_null;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input22.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input22.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input22.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input22.q Thu Jul 22 23:57:29 2010
@@ -12,4 +12,4 @@ FROM (SELECT INPUT4.*, INPUT4.KEY as KEY
       FROM INPUT4) a
 ORDER BY KEY2 LIMIT 10;
 
-DROP TABLE INPUT4;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input24.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input24.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input24.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input24.q Thu Jul 22 23:57:29 2010
@@ -1,4 +1,4 @@
-drop table tst;
+
 create table tst(a int, b int) partitioned by (d string);
 alter table tst add partition (d='2009-01-01');
 explain
@@ -6,4 +6,4 @@ select count(1) from tst x where x.d='20
 
 select count(1) from tst x where x.d='2009-01-01';
 
-drop table tst;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input25.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input25.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input25.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input25.q Thu Jul 22 23:57:29 2010
@@ -1,4 +1,4 @@
-drop table tst;
+
 create table tst(a int, b int) partitioned by (d string);
 alter table tst add partition (d='2009-01-01');
 alter table tst add partition (d='2009-02-02');
@@ -16,4 +16,4 @@ select * from (
   select * from tst x where x.d='2009-02-02' limit 10
 ) subq;
 
-drop table tst;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input28.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input28.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input28.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input28.q Thu Jul 22 23:57:29 2010
@@ -1,4 +1,4 @@
-drop table tst;
+
 create table tst(a string, b string) partitioned by (d string);
 alter table tst add partition (d='2009-01-01');
 
@@ -7,4 +7,4 @@ select tst.a, src.value from tst join sr
 
 select * from tst where tst.d='2009-01-01';
 
-drop table tst;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input3.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input3.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input3.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input3.q Thu Jul 22 23:57:29 2010
@@ -1,6 +1,6 @@
-DROP TABLE TEST3a;
-DROP TABLE TEST3b;
-DROP TABLE TEST3c;
+
+
+
 
 CREATE TABLE TEST3a(A INT, B DOUBLE) STORED AS TEXTFILE; 
 DESCRIBE TEST3a; 
@@ -21,6 +21,6 @@ ALTER TABLE TEST3c REPLACE COLUMNS (R1 I
 ALTER TABLE TEST3c REPLACE COLUMNS (R1 INT, R2 DOUBLE);
 DESCRIBE EXTENDED TEST3c;
 
-DROP TABLE TEST3a;
-DROP TABLE TEST3b;
-DROP TABLE TEST3c;
+
+
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input30.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input30.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input30.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input30.q Thu Jul 22 23:57:29 2010
@@ -1,5 +1,5 @@
-drop table tst_dest30;
-drop table dest30;
+
+
 
 
 create table dest30(a int);
@@ -19,5 +19,5 @@ set hive.test.mode=false;
 
 select * from tst_dest30;
 
-drop table tst_dest30;
-drop table dest30;
+
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input31.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input31.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input31.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input31.q Thu Jul 22 23:57:29 2010
@@ -1,5 +1,5 @@
-drop table tst_dest31;
-drop table dest31;
+
+
 
 set hive.test.mode=true;
 set hive.test.mode.prefix=tst_;
@@ -18,7 +18,7 @@ set hive.test.mode=false;
 
 select * from tst_dest31;
 
-drop table tst_dest31;
-drop table dest31;
+
+
 
 

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input32.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input32.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input32.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input32.q Thu Jul 22 23:57:29 2010
@@ -1,5 +1,5 @@
-drop table tst_dest32;
-drop table dest32;
+
+
 
 set hive.test.mode=true;
 set hive.test.mode.prefix=tst_;
@@ -19,7 +19,7 @@ set hive.test.mode=false;
 
 select * from tst_dest32;
 
-drop table tst_dest32;
-drop table dest32;
+
+
 
 

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input37.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input37.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input37.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input37.q Thu Jul 22 23:57:29 2010
@@ -13,4 +13,4 @@ FROM
 group by url;
 
 
-DROP TABLE documents;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input38.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input38.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input38.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input38.q Thu Jul 22 23:57:29 2010
@@ -1,4 +1,4 @@
-drop table dest1;
+
 CREATE TABLE dest1(key STRING, value STRING) STORED AS TEXTFILE;
 
 EXPLAIN
@@ -19,4 +19,4 @@ INSERT OVERWRITE TABLE dest1 SELECT tmap
 
 SELECT dest1.* FROM dest1;
 
-drop table dest1;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input39.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input39.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input39.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input39.q Thu Jul 22 23:57:29 2010
@@ -1,5 +1,5 @@
-drop table t1;
-drop table t2;
+
+
 
 create table t1(key string, value string) partitioned by (ds string);
 create table t2(key string, value string) partitioned by (ds string);
@@ -23,5 +23,5 @@ select count(1) from t1 join t2 on t1.ke
 
 set hive.test.mode=false;
 
-drop table t1;
-drop table t2;
+
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input3_limit.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input3_limit.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input3_limit.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input3_limit.q Thu Jul 22 23:57:29 2010
@@ -1,9 +1,9 @@
-DROP TABLE T1;
+
 CREATE TABLE T1(key STRING, value STRING) STORED AS TEXTFILE;
 LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE T1;
 LOAD DATA LOCAL INPATH '../data/files/kv2.txt' INTO TABLE T1;
 
-DROP TABLE T2;
+
 CREATE TABLE T2(key STRING, value STRING);
 
 EXPLAIN 
@@ -13,5 +13,5 @@ INSERT OVERWRITE TABLE T2 SELECT * FROM 
 
 SELECT * FROM T2 SORT BY key, value;
 
-DROP TABLE T1;
-DROP TABLE T2;
+
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input4.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input4.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input4.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input4.q Thu Jul 22 23:57:29 2010
@@ -3,4 +3,4 @@ EXPLAIN
 LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUT4;
 LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUT4;
 SELECT INPUT4.VALUE, INPUT4.KEY FROM INPUT4;
-DROP TABLE INPUT4;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input40.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input40.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input40.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input40.q Thu Jul 22 23:57:29 2010
@@ -1,5 +1,5 @@
-drop table tmp_insert_test;
-drop table tmp_insert_test_p;
+
+
 
 create table tmp_insert_test (key string, value string) stored as textfile;
 load data local inpath '../data/files/kv1.txt' into table tmp_insert_test;
@@ -16,5 +16,5 @@ select * from tmp_insert_test_p where ds
 order by key;
 
 
-drop table tmp_insert_test;
-drop table tmp_insert_test_p;
+
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input41.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input41.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input41.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input41.q Thu Jul 22 23:57:29 2010
@@ -11,4 +11,4 @@ select * from 
 
 select * from dest_sp x order by x.cnt limit 2;
 
-drop table dest_sp;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input4_cb_delim.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input4_cb_delim.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input4_cb_delim.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input4_cb_delim.q Thu Jul 22 23:57:29 2010
@@ -1,4 +1,4 @@
 CREATE TABLE INPUT4_CB(KEY STRING, VALUE STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\002' LINES TERMINATED BY '\012' STORED AS TEXTFILE;
 LOAD DATA LOCAL INPATH '../data/files/kv1_cb.txt' INTO TABLE INPUT4_CB;
 SELECT INPUT4_CB.VALUE, INPUT4_CB.KEY FROM INPUT4_CB;
-DROP TABLE INPUT4_CB
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input_columnarserde.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input_columnarserde.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input_columnarserde.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input_columnarserde.q Thu Jul 22 23:57:29 2010
@@ -1,4 +1,4 @@
-drop table input_columnarserde;
+
 CREATE TABLE input_columnarserde(a array<int>, b array<string>, c map<string,string>, d int, e string)
 ROW FORMAT SERDE
   'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe'
@@ -17,4 +17,3 @@ SELECT input_columnarserde.* FROM input_
 
 SELECT input_columnarserde.a[0], input_columnarserde.b[0], input_columnarserde.c['key2'], input_columnarserde.d, input_columnarserde.e FROM input_columnarserde DISTRIBUTE BY 1;
 
-drop table input_columnarserde;
\ No newline at end of file

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input_dfs.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input_dfs.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input_dfs.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input_dfs.q Thu Jul 22 23:57:29 2010
@@ -1,2 +1,2 @@
-dfs -cat ../build/ql/test/data/files/kv1.txt;
+dfs -cat ../data/files/kv1.txt;
 

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input_lazyserde.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input_lazyserde.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input_lazyserde.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input_lazyserde.q Thu Jul 22 23:57:29 2010
@@ -27,4 +27,4 @@ DROP TABLE dest1;
 CREATE TABLE dest1(a map<string,string>) ROW FORMAT DELIMITED FIELDS TERMINATED BY '1' ESCAPED BY '\\';
 INSERT OVERWRITE TABLE dest1 SELECT src_thrift.mstringstring FROM src_thrift DISTRIBUTE BY 1;
 SELECT * from dest1;
-DROP TABLE dest1;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input_part10.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input_part10.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input_part10.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input_part10.q Thu Jul 22 23:57:29 2010
@@ -17,4 +17,4 @@ DESCRIBE EXTENDED part_special PARTITION
 
 SELECT * FROM part_special WHERE ds='2008 04 08' AND ts = '10:11:12=455';
 
-DROP TABLE part_special;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input_part2.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input_part2.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input_part2.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input_part2.q Thu Jul 22 23:57:29 2010
@@ -13,4 +13,4 @@ INSERT OVERWRITE TABLE dest2 SELECT srcp
 SELECT dest1.* FROM dest1 sort by key,value,ds,hr;
 SELECT dest2.* FROM dest2 sort by key,value,ds,hr;
 
-drop table dest2;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input_part5.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input_part5.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input_part5.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input_part5.q Thu Jul 22 23:57:29 2010
@@ -1,4 +1,4 @@
-drop table tmptable;
+
 create table tmptable(key string, value string, hr string, ds string);
 
 EXPLAIN
@@ -9,4 +9,4 @@ insert overwrite table tmptable
 SELECT x.* FROM SRCPART x WHERE x.ds = '2008-04-08' and x.key < 100;
 
 select * from tmptable x sort by x.key,x.value,x.ds,x.hr;
-drop table tmptable;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/inputddl1.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/inputddl1.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/inputddl1.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/inputddl1.q Thu Jul 22 23:57:29 2010
@@ -4,5 +4,3 @@ CREATE TABLE INPUTDDL1(key INT, value ST
 CREATE TABLE INPUTDDL1(key INT, value STRING) STORED AS TEXTFILE; 
 
 SELECT INPUTDDL1.* from INPUTDDL1;
-
-DROP TABLE INPUTDDL1;

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/inputddl2.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/inputddl2.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/inputddl2.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/inputddl2.q Thu Jul 22 23:57:29 2010
@@ -2,5 +2,5 @@ EXPLAIN
 CREATE TABLE INPUTDDL2(key INT, value STRING) PARTITIONED BY(ds STRING, country STRING) STORED AS TEXTFILE;
 CREATE TABLE INPUTDDL2(key INT, value STRING) PARTITIONED BY(ds STRING, country STRING) STORED AS TEXTFILE;
 DESCRIBE INPUTDDL2;
-DROP TABLE INPUTDDL2;
+
 

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/inputddl3.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/inputddl3.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/inputddl3.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/inputddl3.q Thu Jul 22 23:57:29 2010
@@ -2,4 +2,4 @@ EXPLAIN
 CREATE TABLE INPUTDDL3(key INT, value STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' STORED AS TEXTFILE;
 CREATE TABLE INPUTDDL3(key INT, value STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' STORED AS TEXTFILE;
 DESCRIBE INPUTDDL3;
-DROP TABLE INPUTDDL3;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/inputddl4.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/inputddl4.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/inputddl4.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/inputddl4.q Thu Jul 22 23:57:29 2010
@@ -1,5 +1,5 @@
 -- a simple test to test sorted/clustered syntax
-DROP TABLE INPUTDDL4;
+
 CREATE TABLE INPUTDDL4(viewTime STRING, userid INT,
                        page_url STRING, referrer_url STRING, 
                        friends ARRAY<BIGINT>, properties MAP<STRING, STRING>,
@@ -9,4 +9,4 @@ CREATE TABLE INPUTDDL4(viewTime STRING, 
     CLUSTERED BY(userid) SORTED BY(viewTime) INTO 32 BUCKETS;
 DESCRIBE INPUTDDL4;
 DESCRIBE EXTENDED INPUTDDL4;
-DROP TABLE INPUTDDL4;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/inputddl5.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/inputddl5.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/inputddl5.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/inputddl5.q Thu Jul 22 23:57:29 2010
@@ -5,4 +5,4 @@ LOAD DATA LOCAL INPATH '../data/files/kv
 DESCRIBE INPUTDDL5;
 SELECT INPUTDDL5.name from INPUTDDL5;
 SELECT count(1) FROM INPUTDDL5 WHERE INPUTDDL5.name = _UTF-8 0xE982B5E993AE;
-DROP TABLE INPUTDDL5;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/inputddl6.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/inputddl6.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/inputddl6.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/inputddl6.q Thu Jul 22 23:57:29 2010
@@ -1,7 +1,6 @@
 -- test for describe extended table
 -- test for describe extended table partition
 -- test for alter table drop partition
-DROP TABLE INPUTDDL6;
 CREATE TABLE INPUTDDL6(KEY STRING, VALUE STRING) PARTITIONED BY(ds STRING) STORED AS TEXTFILE;
 LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUTDDL6 PARTITION (ds='2008-04-09');
 LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUTDDL6 PARTITION (ds='2008-04-08');

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/inputddl7.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/inputddl7.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/inputddl7.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/inputddl7.q Thu Jul 22 23:57:29 2010
@@ -1,22 +1,22 @@
 -- test for loading into tables with the correct file format
 -- test for loading into partitions with the correct file format
 
-DROP TABLE T1;
+
 CREATE TABLE T1(name STRING) STORED AS TEXTFILE;
 LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE T1;
 SELECT COUNT(1) FROM T1;
 
-DROP TABLE T2;
+
 CREATE TABLE T2(name STRING) STORED AS SEQUENCEFILE;
 LOAD DATA LOCAL INPATH '../data/files/kv1.seq' INTO TABLE T2;
 SELECT COUNT(1) FROM T2;
 
-DROP TABLE T3;
+
 CREATE TABLE T3(name STRING) PARTITIONED BY(ds STRING) STORED AS TEXTFILE;
 LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE T3 PARTITION (ds='2008-04-09');
 SELECT COUNT(1) FROM T3 where T3.ds='2008-04-09';
 
-DROP TABLE T4;
+
 CREATE TABLE T4(name STRING) PARTITIONED BY(ds STRING) STORED AS SEQUENCEFILE;
 LOAD DATA LOCAL INPATH '../data/files/kv1.seq' INTO TABLE T4 PARTITION (ds='2008-04-09');
 SELECT COUNT(1) FROM T4 where T4.ds='2008-04-09';
@@ -27,7 +27,7 @@ DESCRIBE EXTENDED T3 PARTITION (ds='2008
 DESCRIBE EXTENDED T4 PARTITION (ds='2008-04-09');
 
 
-DROP TABLE T1;
-DROP TABLE T2;
-DROP TABLE T3;
-DROP TABLE T4;
+
+
+
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/inputddl8.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/inputddl8.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/inputddl8.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/inputddl8.q Thu Jul 22 23:57:29 2010
@@ -1,4 +1,4 @@
-DROP TABLE INPUTDDL8;
+
 CREATE TABLE INPUTDDL8 COMMENT 'This is a thrift based table'
     PARTITIONED BY(ds STRING, country STRING)
     CLUSTERED BY(aint) SORTED BY(lint) INTO 32 BUCKETS
@@ -7,4 +7,4 @@ CREATE TABLE INPUTDDL8 COMMENT 'This is 
                           'serialization.format' = 'com.facebook.thrift.protocol.TBinaryProtocol')
     STORED AS SEQUENCEFILE;
 DESCRIBE EXTENDED INPUTDDL8;
-DROP TABLE INPUTDDL8;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/insert1.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/insert1.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/insert1.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/insert1.q Thu Jul 22 23:57:29 2010
@@ -1,7 +1,7 @@
-drop table insert1;
-drop table insert2;
+
+
 create table insert1(key int, value string) stored as textfile;
 create table insert2(key int, value string) stored as textfile;
 insert overwrite table insert1 select a.key, a.value from insert2 a WHERE (a.key=-1);
-drop table insert1;
-drop table insert2;
+
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/insertexternal1.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/insertexternal1.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/insertexternal1.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/insertexternal1.q Thu Jul 22 23:57:29 2010
@@ -1,4 +1,4 @@
-drop table texternal;
+
 
 create table texternal(key string, val string) partitioned by (insertdate string);
 

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/join19.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/join19.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/join19.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/join19.q Thu Jul 22 23:57:29 2010
@@ -1,4 +1,3 @@
-drop TABLE triples;
 CREATE TABLE triples (foo string, subject string, predicate string, object string, foo2 string);
 
 EXPLAIN
@@ -56,5 +55,4 @@ WHERE
 t6.predicate='http://sofa.semanticweb.org/sofa/v1.0/system#__LABEL_REL'
 ) t66
 ON (t66.subject=t55.object);
-drop TABLE triples;
 

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/join24.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/join24.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/join24.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/join24.q Thu Jul 22 23:57:29 2010
@@ -5,4 +5,4 @@ SELECT a.key, count(1) FROM src a group 
 
 SELECT sum(a.cnt)  FROM tst1 a JOIN tst1 b ON a.key = b.key;
 
-drop table tst1;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/join25.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/join25.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/join25.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/join25.q Thu Jul 22 23:57:29 2010
@@ -1,6 +1,6 @@
 set hive.mapjoin.numrows = 2;
 
-drop table dest_j1;
+
 
 CREATE TABLE dest_j1(key INT, value STRING, val2 STRING) STORED AS TEXTFILE;
 
@@ -15,5 +15,5 @@ FROM src1 x JOIN src y ON (x.key = y.key
 
 select * from dest_j1 x order by x.key;
 
-drop table dest_j1;
+
 

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/join26.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/join26.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/join26.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/join26.q Thu Jul 22 23:57:29 2010
@@ -13,5 +13,5 @@ JOIN srcpart z ON (x.key = z.key and z.d
 
 select * from dest_j1 x order by x.key;
 
-drop table dest_j1;
+
 

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/join27.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/join27.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/join27.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/join27.q Thu Jul 22 23:57:29 2010
@@ -1,4 +1,4 @@
-drop table dest_j1;
+
 
 CREATE TABLE dest_j1(key INT, value STRING, val2 STRING) STORED AS TEXTFILE;
 
@@ -13,5 +13,5 @@ FROM src1 x JOIN src y ON (x.value = y.v
 
 select * from dest_j1 x order by x.key, x.value;
 
-drop table dest_j1;
+
 

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/join28.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/join28.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/join28.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/join28.q Thu Jul 22 23:57:29 2010
@@ -1,4 +1,4 @@
-drop table dest_j1;
+
 
 CREATE TABLE dest_j1(key STRING, value STRING) STORED AS TEXTFILE;
 
@@ -19,5 +19,5 @@ FROM
 
 select * from dest_j1 x order by x.key;
 
-drop table dest_j1;
+
 

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/join29.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/join29.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/join29.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/join29.q Thu Jul 22 23:57:29 2010
@@ -1,5 +1,3 @@
-drop TABLE dest_j1;
-
 CREATE TABLE dest_j1(key STRING, cnt1 INT, cnt2 INT);
 
 EXPLAIN 
@@ -14,5 +12,3 @@ FROM (select x.key, count(1) as cnt from
      (select y.key, count(1) as cnt from src y group by y.key) subq2 ON (subq1.key = subq2.key);
 
 select * from dest_j1 x order by x.key;
-
-drop TABLE dest_j1;

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/join30.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/join30.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/join30.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/join30.q Thu Jul 22 23:57:29 2010
@@ -1,5 +1,3 @@
-drop TABLE dest_j1;
-
 CREATE TABLE dest_j1(key INT, cnt INT);
 
 EXPLAIN
@@ -10,5 +8,3 @@ INSERT OVERWRITE TABLE dest_j1 
 SELECT /*+ MAPJOIN(x) */ x.key, count(1) FROM src1 x JOIN src y ON (x.key = y.key) group by x.key;
 
 select * from dest_j1 x order by x.key;
-
-drop TABLE dest_j1;

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/join31.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/join31.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/join31.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/join31.q Thu Jul 22 23:57:29 2010
@@ -1,5 +1,3 @@
-drop TABLE dest_j1;
-
 CREATE TABLE dest_j1(key STRING, cnt INT);
 
 EXPLAIN 
@@ -16,5 +14,3 @@ FROM (select x.key, count(1) as cnt from
 group by subq1.key;
 
 select * from dest_j1 x order by x.key;
-
-drop TABLE dest_j1;

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/join32.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/join32.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/join32.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/join32.q Thu Jul 22 23:57:29 2010
@@ -13,5 +13,5 @@ JOIN srcpart z ON (x.value = z.value and
 
 select * from dest_j1 x order by x.key;
 
-drop table dest_j1;
+
 

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/join33.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/join33.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/join33.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/join33.q Thu Jul 22 23:57:29 2010
@@ -13,5 +13,5 @@ JOIN srcpart z ON (x.value = z.value and
 
 select * from dest_j1 x order by x.key;
 
-drop table dest_j1;
+
 

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/join34.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/join34.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/join34.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/join34.q Thu Jul 22 23:57:29 2010
@@ -1,4 +1,4 @@
-drop table dest_j1;
+
 
 CREATE TABLE dest_j1(key STRING, value STRING, val2 STRING) STORED AS TEXTFILE;
 
@@ -23,5 +23,5 @@ JOIN src1 x ON (x.key = subq1.key);
 
 select * from dest_j1 x order by x.key;
 
-drop table dest_j1;
+
 

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/join35.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/join35.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/join35.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/join35.q Thu Jul 22 23:57:29 2010
@@ -1,4 +1,4 @@
-drop table dest_j1;
+
 
 CREATE TABLE dest_j1(key STRING, value STRING, val2 INT) STORED AS TEXTFILE;
 
@@ -23,5 +23,5 @@ JOIN src1 x ON (x.key = subq1.key);
 
 select * from dest_j1 x order by x.key;
 
-drop table dest_j1;
+
 

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/join36.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/join36.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/join36.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/join36.q Thu Jul 22 23:57:29 2010
@@ -1,8 +1,8 @@
 set hive.mapjoin.numrows = 2;
 
-drop table dest_j1;
-drop table tmp1;
-drop table tmp2;
+
+
+
 
 CREATE TABLE tmp1(key INT, cnt INT);
 CREATE TABLE tmp2(key INT, cnt INT);
@@ -25,5 +25,5 @@ FROM tmp1 x JOIN tmp2 y ON (x.key = y.ke
 
 select * from dest_j1 x order by x.key;
 
-drop table dest_j1;
+
 

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/join37.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/join37.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/join37.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/join37.q Thu Jul 22 23:57:29 2010
@@ -1,6 +1,6 @@
 set hive.mapjoin.numrows = 2;
 
-drop table dest_j1;
+
 
 CREATE TABLE dest_j1(key INT, value STRING, val2 STRING) STORED AS TEXTFILE;
 
@@ -15,5 +15,5 @@ FROM src1 x JOIN src y ON (x.key = y.key
 
 select * from dest_j1 x order by x.key;
 
-drop table dest_j1;
+
 

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/join38.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/join38.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/join38.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/join38.q Thu Jul 22 23:57:29 2010
@@ -1,4 +1,4 @@
-drop table tmp;
+
 
 create table tmp(col0 string, col1 string,col2 string,col3 string,col4 string,col5 string,col6 string,col7 string,col8 string,col9 string,col10 string,col11 string);
 
@@ -17,4 +17,4 @@ SELECT /*+ MAPJOIN(a) */ a.value, b.col5
 where b.col11 = 111
 group by a.value, b.col5;
 
-drop table tmp;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/join39.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/join39.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/join39.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/join39.q Thu Jul 22 23:57:29 2010
@@ -1,6 +1,6 @@
 set hive.mapjoin.cache.numrows = 2;
 
-drop table dest_j1;
+
 
 CREATE TABLE dest_j1(key STRING, value STRING, key1 string, val2 STRING) STORED AS TEXTFILE;
 
@@ -11,5 +11,5 @@ FROM src x left outer JOIN (select * fro
 
 select * from dest_j1 x order by x.key;
 
-drop table dest_j1;
+
 

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/join_hive_626.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/join_hive_626.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/join_hive_626.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/join_hive_626.q Thu Jul 22 23:57:29 2010
@@ -1,6 +1,6 @@
-drop table hive_foo;
-drop table hive_bar;
-drop table hive_count;
+
+
+
 
 create table hive_foo (foo_id int, foo_name string, foo_a string, foo_b string, 
 foo_c string, foo_d string) row format delimited fields terminated by ','
@@ -25,6 +25,6 @@ select hive_foo.foo_name, hive_bar.bar_n
 hive_bar.foo_id join hive_count on hive_count.bar_id = hive_bar.bar_id;
 
 
-drop table hive_foo;
-drop table hive_bar;
-drop table hive_count;
+
+
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/join_map_ppr.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/join_map_ppr.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/join_map_ppr.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/join_map_ppr.q Thu Jul 22 23:57:29 2010
@@ -35,7 +35,7 @@ WHERE z.ds='2008-04-08' and z.hr=11;
 
 select * from dest_j1 x order by x.key;
 
-drop table src_copy;
-drop table src1_copy;
-drop table dest_j1;
+
+
+
 

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/join_rc.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/join_rc.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/join_rc.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/join_rc.q Thu Jul 22 23:57:29 2010
@@ -1,5 +1,5 @@
-drop table join_rc1;
-drop table join_rc2;
+
+
 create table join_rc1(key string, value string) stored as RCFile;
 create table join_rc2(key string, value string) stored as RCFile;
 insert overwrite table join_rc1 select * from src;
@@ -12,5 +12,5 @@ FROM join_rc1 JOIN join_rc2 ON join_rc1.
 select join_rc1.key, join_rc2.value
 FROM join_rc1 JOIN join_rc2 ON join_rc1.key = join_rc2.key;
 
-drop table join_rc1;
-drop table join_rc2;
+
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/join_reorder.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/join_reorder.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/join_reorder.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/join_reorder.q Thu Jul 22 23:57:29 2010
@@ -1,6 +1,6 @@
-DROP TABLE T1;
-DROP TABLE T2;
-DROP TABLE T3;
+
+
+
 
 CREATE TABLE T1(key STRING, val STRING) STORED AS TEXTFILE;
 CREATE TABLE T2(key STRING, val STRING) STORED AS TEXTFILE;
@@ -66,6 +66,6 @@ FROM UNIQUEJOIN
   PRESERVE T3 c (c.key, c.val)
 SELECT /*+ STREAMTABLE(b) */ a.key, b.key, c.key;
 
-DROP TABLE T1;
-DROP TABLE T2;
-DROP TABLE T3;
+
+
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/join_reorder2.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/join_reorder2.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/join_reorder2.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/join_reorder2.q Thu Jul 22 23:57:29 2010
@@ -1,7 +1,7 @@
-DROP TABLE T1;
-DROP TABLE T2;
-DROP TABLE T3;
-DROP TABLE T4;
+
+
+
+
 
 CREATE TABLE T1(key STRING, val STRING) STORED AS TEXTFILE;
 CREATE TABLE T2(key STRING, val STRING) STORED AS TEXTFILE;
@@ -38,7 +38,7 @@ FROM T1 a JOIN T2 b ON a.key = b.key
           JOIN T4 d ON a.key + 1 = d.key + 1;
 
 
-DROP TABLE T1;
-DROP TABLE T2;
-DROP TABLE T3;
-DROP TABLE T4;
+
+
+
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/join_reorder3.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/join_reorder3.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/join_reorder3.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/join_reorder3.q Thu Jul 22 23:57:29 2010
@@ -1,7 +1,7 @@
-DROP TABLE T1;
-DROP TABLE T2;
-DROP TABLE T3;
-DROP TABLE T4;
+
+
+
+
 
 CREATE TABLE T1(key STRING, val STRING) STORED AS TEXTFILE;
 CREATE TABLE T2(key STRING, val STRING) STORED AS TEXTFILE;
@@ -38,7 +38,7 @@ FROM T1 a JOIN T2 b ON a.key = b.key
           JOIN T4 d ON a.key + 1 = d.key + 1;
 
 
-DROP TABLE T1;
-DROP TABLE T2;
-DROP TABLE T3;
-DROP TABLE T4;
+
+
+
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/lateral_view.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/lateral_view.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/lateral_view.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/lateral_view.q Thu Jul 22 23:57:29 2010
@@ -1,5 +1,5 @@
-DROP TABLE tmp_pyang_lv;
-DROP TABLE tmp_pyang_src_rcfile;
+
+
 
 CREATE TABLE tmp_pyang_lv (inputs string) STORED AS RCFILE;
 INSERT OVERWRITE TABLE tmp_pyang_lv SELECT key FROM src;
@@ -51,5 +51,5 @@ LATERAL VIEW explode(value) myTable AS m
 SELECT value, myCol from (SELECT key, array(value[0]) AS value FROM tmp_pyang_src_rcfile GROUP BY value[0], key) a
 LATERAL VIEW explode(value) myTable AS myCol;
 
-DROP TABLE tmp_pyang_src_rcfile;
-DROP TABLE tmp_pyang_lv;
+
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/lineage1.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/lineage1.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/lineage1.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/lineage1.q Thu Jul 22 23:57:29 2010
@@ -1,4 +1,4 @@
-drop table dest_l1;
+
 
 CREATE TABLE dest_l1(key INT, value STRING) STORED AS TEXTFILE;
 

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part1.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part1.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part1.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part1.q Thu Jul 22 23:57:29 2010
@@ -1,7 +1,7 @@
 show partitions srcpart;
 
-drop table nzhang_part1;
-drop table nzhang_part2;
+
+
 
 create table if not exists nzhang_part1 like srcpart;
 create table if not exists nzhang_part2 like srcpart;
@@ -26,5 +26,5 @@ show partitions nzhang_part2;
 select * from nzhang_part1 where ds is not null and hr is not null;
 select * from nzhang_part2 where ds is not null and hr is not null;
 
-drop table nzhang_part1;
-drop table nzhang_part2;
+
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part10.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part10.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part10.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part10.q Thu Jul 22 23:57:29 2010
@@ -1,6 +1,6 @@
 show partitions srcpart;
 
-drop table nzhang_part10;
+
 
 create table if not exists nzhang_part10 like srcpart;
 describe extended nzhang_part10;
@@ -21,4 +21,4 @@ show partitions nzhang_part10;
 
 select * from nzhang_part10 where ds is not null and hr is not null;
 
-drop table nzhang_part10;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part11.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part11.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part11.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part11.q Thu Jul 22 23:57:29 2010
@@ -1,6 +1,6 @@
 show partitions srcpart;
 
-drop table nzhang_part;
+
 create table if not exists nzhang_part like srcpart;
 describe extended nzhang_part;
 
@@ -14,4 +14,4 @@ insert overwrite table nzhang_part parti
 select * from nzhang_part where ds = '2010-03-03' and hr = '11';
 select * from nzhang_part where ds = '2010-03-03' and hr = '12';
 
-drop table nzhang_part;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part12.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part12.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part12.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part12.q Thu Jul 22 23:57:29 2010
@@ -1,6 +1,6 @@
 show partitions srcpart;
 
-drop table nzhang_part12;
+
 
 create table if not exists nzhang_part12 like srcpart;
 describe extended nzhang_part12;
@@ -16,4 +16,4 @@ show partitions nzhang_part12;
 
 select * from nzhang_part12 where ds is not null and hr is not null;
 
-drop table nzhang_part12;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part13.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part13.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part13.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part13.q Thu Jul 22 23:57:29 2010
@@ -1,6 +1,6 @@
 show partitions srcpart;
 
-drop table nzhang_part13;
+
 
 create table if not exists nzhang_part13 like srcpart;
 describe extended nzhang_part13;
@@ -34,4 +34,4 @@ show partitions nzhang_part13;
 
 select * from nzhang_part13 where ds is not null and hr is not null;
 
-drop table nzhang_part13;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part14.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part14.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part14.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part14.q Thu Jul 22 23:57:29 2010
@@ -1,4 +1,4 @@
-drop table nzhang_part14;
+
 create table if not exists nzhang_part14 (key string) 
   partitioned by (value string);
 
@@ -32,4 +32,4 @@ show partitions nzhang_part14;
 select * from nzhang_part14 where value <> 'a'
 order by key, value;
 
-drop table nzhang_part14;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part2.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part2.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part2.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part2.q Thu Jul 22 23:57:29 2010
@@ -1,4 +1,4 @@
-drop table nzhang_part_bucket;
+
 create table if not exists nzhang_part_bucket (key string, value string) 
   partitioned by (ds string, hr string) 
   clustered by (key) into 10 buckets;
@@ -19,5 +19,5 @@ show partitions nzhang_part_bucket;
 select * from nzhang_part_bucket where ds='2010-03-23' and hr='11' order by key;
 select * from nzhang_part_bucket where ds='2010-03-23' and hr='12' order by key;
 
-drop table nzhang_part_bucket;
+
 

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part3.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part3.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part3.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part3.q Thu Jul 22 23:57:29 2010
@@ -1,6 +1,6 @@
 show partitions srcpart;
 
-drop table nzhang_part3;
+
 
 create table if not exists nzhang_part3 like srcpart;
 describe extended nzhang_part3;
@@ -16,4 +16,4 @@ insert overwrite table nzhang_part3 part
 
 select * from nzhang_part3 where ds is not null and hr is not null;
 
-drop table nzhang_part3;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part4.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part4.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part4.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part4.q Thu Jul 22 23:57:29 2010
@@ -1,6 +1,6 @@
 show partitions srcpart;
 
-drop table nzhang_part4;
+
 
 create table if not exists nzhang_part4 like srcpart;
 describe extended nzhang_part4;
@@ -21,4 +21,4 @@ select * from nzhang_part4 where ds='200
 
 select * from nzhang_part4 where ds is not null and hr is not null;
 
-drop table nzhang_part4;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part5.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part5.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part5.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part5.q Thu Jul 22 23:57:29 2010
@@ -1,4 +1,4 @@
-drop table nzhang_part5;
+
 
 create table if not exists nzhang_part5 (key string) partitioned by (value string);
 describe extended nzhang_part5;
@@ -19,4 +19,4 @@ show partitions nzhang_part5;
 select * from nzhang_part5 where value='val_0';
 select * from nzhang_part5 where value='val_2';
 
-drop table nzhang_part5;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part6.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part6.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part6.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part6.q Thu Jul 22 23:57:29 2010
@@ -1,6 +1,6 @@
 show partitions srcpart;
 
-drop table nzhang_part6;
+
 
 create table if not exists nzhang_part6 like srcpart;
 describe extended nzhang_part6;
@@ -13,4 +13,4 @@ insert overwrite table nzhang_part6 part
 
 select * from nzhang_part6 where ds = '2010-03-03' and hr = '11';
 select * from nzhang_part6 where ds = '2010-03-03' and hr = '12';
-drop table nzhang_part6;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part7.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part7.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part7.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part7.q Thu Jul 22 23:57:29 2010
@@ -1,6 +1,6 @@
 show partitions srcpart;
 
-drop table nzhang_part7;
+
 
 create table if not exists nzhang_part7 like srcpart;
 describe extended nzhang_part7;
@@ -11,4 +11,4 @@ insert overwrite table nzhang_part7 part
 show partitions nzhang_part7;
 
 select * from nzhang_part7 where ds is not null and hr is not null;
-drop table nzhang_part7;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part8.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part8.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part8.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part8.q Thu Jul 22 23:57:29 2010
@@ -1,6 +1,6 @@
 show partitions srcpart;
 
-drop table nzhang_part8;
+
 
 create table if not exists nzhang_part8 like srcpart;
 describe extended nzhang_part8;
@@ -21,4 +21,4 @@ insert overwrite table nzhang_part8 part
 show partitions nzhang_part8;
 
 select * from nzhang_part8 where ds is not null and hr is not null;
-drop table nzhang_part8;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part9.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part9.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part9.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/load_dyn_part9.q Thu Jul 22 23:57:29 2010
@@ -1,6 +1,6 @@
 show partitions srcpart;
 
-drop table nzhang_part9;
+
 
 create table if not exists nzhang_part9 like srcpart;
 describe extended nzhang_part9;
@@ -20,4 +20,4 @@ insert overwrite table nzhang_part9 part
 show partitions nzhang_part9;
 
 select * from nzhang_part9 where ds is not null and hr is not null;
-drop table nzhang_part9;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/loadpart1.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/loadpart1.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/loadpart1.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/loadpart1.q Thu Jul 22 23:57:29 2010
@@ -1,5 +1,5 @@
-drop table hive_test_src;
-drop table hive_test_dst;
+
+
 
 create table hive_test_src ( col1 string ) stored as textfile ;
 load data local inpath '../data/files/test.dat' overwrite into table hive_test_src ;
@@ -15,5 +15,5 @@ select * from hive_test_dst where pcol1=
 select * from hive_test_dst where pcol1='test_part' and pcol2='test_part';
 select * from hive_test_dst where pcol1='test_Part';
 
-drop table hive_test_src;
-drop table hive_test_dst;
+
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/loadpart_err.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/loadpart_err.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/loadpart_err.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/loadpart_err.q Thu Jul 22 23:57:29 2010
@@ -1,5 +1,5 @@
 set hive.cli.errors.ignore=true;
-DROP TABLE loadpart1;
+
 CREATE TABLE loadpart1(a STRING, b STRING) PARTITIONED BY (ds STRING);
 
 INSERT OVERWRITE TABLE loadpart1 PARTITION (ds='2009-01-01')
@@ -12,4 +12,4 @@ SHOW PARTITIONS loadpart1;
 LOAD DATA LOCAL INPATH '../data1/files/kv1.txt' INTO TABLE loadpart1 PARTITION(ds='2009-05-05');
 SHOW PARTITIONS loadpart1;
 
-DROP TABLE loadpart1;
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/merge1.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/merge1.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/merge1.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/merge1.q Thu Jul 22 23:57:29 2010
@@ -1,4 +1,3 @@
-drop table dest1;
 set hive.merge.mapredfiles=true;
 
 create table dest1(key int, val int);
@@ -14,7 +13,6 @@ select * from dest1;
 
 drop table dest1;
 
-
 create table test_src(key string, value string) partitioned by (ds string);
 create table dest1(key string);
 
@@ -29,6 +27,3 @@ set hive.merge.smallfiles.avgsize=16;
 explain
 insert overwrite table dest1 select key from test_src;
 insert overwrite table dest1 select key from test_src;
-
-drop table test_src;
-drop table dest1;
\ No newline at end of file

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/multi_insert.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/multi_insert.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/multi_insert.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/multi_insert.q Thu Jul 22 23:57:29 2010
@@ -1,5 +1,5 @@
-drop table src_multi1;
-drop table src_multi2;
+
+
 create table src_multi1 like src;
 create table src_multi2 like src;
 

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/null_column.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/null_column.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/null_column.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/null_column.q Thu Jul 22 23:57:29 2010
@@ -1,6 +1,6 @@
-drop table temp_null;
-drop table tt;
-drop table tt_b;
+
+
+
 
 create table temp_null(a int) stored as textfile;
 load data local inpath '../data/files/test.dat' overwrite into table temp_null;
@@ -18,12 +18,12 @@ select * from tt_b;
 insert overwrite directory "../build/ql/test/data/warehouse/null_columns.out" select null, null from temp_null;
 dfs -cat ../build/ql/test/data/warehouse/null_columns.out/*;
 
-drop table temp_null2;
+
 create table temp_null2 (key string, value string) partitioned by (ds string);
 insert overwrite table temp_null2 partition(ds='2010-04-01') select '1',NULL from src limit 1;
 select * from temp_null2 where ds is not null;
 
-drop table tt;
-drop table tt_b;
-drop table temp_null;
-drop table temp_null2;
+
+
+
+

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/nullgroup3.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/nullgroup3.q?rev=966909&r1=966908&r2=966909&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/nullgroup3.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/nullgroup3.q Thu Jul 22 23:57:29 2010
@@ -1,4 +1,3 @@
-DROP TABLE tstparttbl;
 CREATE TABLE tstparttbl(KEY STRING, VALUE STRING) PARTITIONED BY(ds string) STORED AS TEXTFILE;
 LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE tstparttbl PARTITION (ds='2008-04-09');
 LOAD DATA LOCAL INPATH '../data/files/nullfile.txt' INTO TABLE tstparttbl PARTITION (ds='2008-04-08');
@@ -6,7 +5,6 @@ explain
 select count(1) from tstparttbl;
 select count(1) from tstparttbl;
 
-DROP TABLE tstparttbl2;
 CREATE TABLE tstparttbl2(KEY STRING, VALUE STRING) PARTITIONED BY(ds string) STORED AS TEXTFILE;
 LOAD DATA LOCAL INPATH '../data/files/nullfile.txt' INTO TABLE tstparttbl2 PARTITION (ds='2008-04-09');
 LOAD DATA LOCAL INPATH '../data/files/nullfile.txt' INTO TABLE tstparttbl2 PARTITION (ds='2008-04-08');
@@ -28,6 +26,3 @@ LOAD DATA LOCAL INPATH '../data/files/nu
 explain
 select count(1) from tstparttbl2;
 select count(1) from tstparttbl2;
-
-DROP TABLE tstparttbl;
-DROP TABLE tstparttbl2;