Posted to common-commits@hadoop.apache.org by dh...@apache.org on 2008/10/21 20:11:18 UTC

svn commit: r706704 [6/23] - in /hadoop/core/trunk: ./ src/contrib/hive/ src/contrib/hive/bin/ src/contrib/hive/cli/src/java/org/apache/hadoop/hive/cli/ src/contrib/hive/common/src/java/org/apache/hadoop/hive/conf/ src/contrib/hive/conf/ src/contrib/hi...

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/serde2/TestSerDe.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/serde2/TestSerDe.java?rev=706704&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/serde2/TestSerDe.java (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/serde2/TestSerDe.java Tue Oct 21 11:11:05 2008
@@ -0,0 +1,190 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.serde2;
+
+import java.io.UnsupportedEncodingException;
+import java.nio.charset.CharacterCodingException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Properties;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.serde2.objectinspector.MetadataListStructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
+import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.SerDeException;
+
+public class TestSerDe implements SerDe {
+
+  public static final Log LOG = LogFactory.getLog(TestSerDe.class.getName());
+
+  public String getShortName() {
+    return shortName();
+  }
+
+
+  public static String shortName() {
+    return "test_meta";
+  }
+
+  static {
+    StackTraceElement[] sTrace = new Exception().getStackTrace();
+    String className = sTrace[0].getClassName();
+    try {
+      SerDeUtils.registerSerDe(shortName(), Class.forName(className));
+      // For backward compatibility: this class replaces the following class.
+      SerDeUtils.registerSerDe("org.apache.hadoop.hive.serde.TestSerDe", Class.forName(className));
+    } catch(Exception e) {
+      throw new RuntimeException(e);
+    }
+  }
+  
+  final public static String DefaultSeparator = "\002";
+
+  private String separator;
+  // constant for now, will make it configurable later.
+  private String nullString = "\\N"; 
+  private List<String> columnNames;
+  private ObjectInspector cachedObjectInspector;
+
+  public String toString() {
+    return "TestSerDe[" + separator + "," + columnNames + "]";
+  }
+
+  public TestSerDe() throws SerDeException {
+    separator = DefaultSeparator;
+  }
+
+  public void initialize(Configuration job, Properties tbl) throws SerDeException {
+    separator = DefaultSeparator;
+    String alt_sep = tbl.getProperty("testserde.default.serialization.format");
+    if(alt_sep != null && alt_sep.length() > 0) {
+      try {
+        byte b [] = new byte[1];
+        b[0] = Byte.valueOf(alt_sep).byteValue();
+        separator = new String(b);
+      } catch(NumberFormatException e) {
+        separator = alt_sep;
+      }
+    }
+
+    String columnProperty = tbl.getProperty("columns");
+    if (columnProperty == null || columnProperty.length() == 0) {
+      // Hack for tables with no columns
+      // Treat it as a table with a single column called "col" 
+      cachedObjectInspector = ObjectInspectorFactory.getReflectionObjectInspector(
+          ColumnSet.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
+    } else {
+      columnNames = Arrays.asList(columnProperty.split(","));
+      cachedObjectInspector = MetadataListStructObjectInspector.getInstance(columnNames);
+    }
+    LOG.info(getClass().getName() + ": initialized with columnNames: " + columnNames );
+  }
+
+  public static Object deserialize(ColumnSet c, String row, String sep, String nullString) throws Exception {
+    if (c.col == null) {
+      c.col = new ArrayList<String>();
+    } else {
+      c.col.clear();
+    }
+    String [] l1 = row.split(sep, -1);
+
+    for(String s: l1) {
+      if (s.equals(nullString)) {
+        c.col.add(null);
+      } else {
+        c.col.add(s);
+      }
+    }
+    return (c);
+  }
+  
+  ColumnSet deserializeCache = new ColumnSet();
+  public Object deserialize(Writable field) throws SerDeException {
+    String row = null;
+    if (field instanceof BytesWritable) {
+      BytesWritable b = (BytesWritable)field;
+      try {
+        row = Text.decode(b.get(), 0, b.getSize());
+      } catch (CharacterCodingException e) {
+        throw new SerDeException(e);
+      }
+    } else if (field instanceof Text) {
+      row = field.toString();
+    }
+    try {
+      deserialize(deserializeCache, row, separator, nullString);
+      if (columnNames != null) {
+        assert(columnNames.size() == deserializeCache.col.size());
+      }
+      return deserializeCache;
+    } catch (ClassCastException e) {
+      throw new SerDeException( this.getClass().getName() + " expects Text or BytesWritable", e);
+    } catch (Exception e) {
+      throw new SerDeException(e);
+    }
+  }
+  
+  
+  public ObjectInspector getObjectInspector() throws SerDeException {
+    return cachedObjectInspector;
+  }
+
+  public Class<? extends Writable> getSerializedClass() {
+    return Text.class;
+  }
+  
+  Text serializeCache = new Text();
+  public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
+
+    if (objInspector.getCategory() != Category.STRUCT) {
+      throw new SerDeException(getClass().toString() 
+          + " can only serialize struct types, but we got: " + objInspector.getTypeName());
+    }
+    StructObjectInspector soi = (StructObjectInspector) objInspector;
+    List<? extends StructField> fields = soi.getAllStructFieldRefs();
+    
+    StringBuilder sb = new StringBuilder();
+    for(int i=0; i<fields.size(); i++) {
+      if (i>0) sb.append(separator);
+      Object column = soi.getStructFieldData(obj, fields.get(i));
+      if (fields.get(i).getFieldObjectInspector().getCategory() == Category.PRIMITIVE) {
+        // For primitive object, serialize to plain string
+        sb.append(column == null ? nullString : column.toString());
+      } else {
+        // For complex object, serialize to JSON format
+        sb.append(SerDeUtils.getJSONString(column, fields.get(i).getFieldObjectInspector()));
+      }
+    }
+    serializeCache.set(sb.toString());
+    return serializeCache;
+  }
+
+}
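
For reference, here is a minimal sketch (not part of the commit) of how this SerDe can be driven directly, assuming the caller sits in the org.apache.hadoop.hive.serde2 package so ColumnSet's col field is reachable; the class name TestSerDeExample is hypothetical:

    package org.apache.hadoop.hive.serde2;

    import java.util.Properties;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.Text;

    public class TestSerDeExample {
      public static void main(String[] args) throws Exception {
        Properties tbl = new Properties();
        tbl.setProperty("columns", "key,value");
        // "9" parses as the decimal byte 0x09, so fields are tab-separated.
        tbl.setProperty("testserde.default.serialization.format", "9");

        TestSerDe serde = new TestSerDe();
        serde.initialize(new Configuration(), tbl);

        // deserialize() reuses a single ColumnSet cache across calls.
        ColumnSet row = (ColumnSet) serde.deserialize(new Text("86\tval_86"));
        System.out.println(row.col);   // [86, val_86]

        // Fields equal to the null marker "\N" come back as Java nulls.
        ColumnSet row2 = (ColumnSet) serde.deserialize(new Text("87\t\\N"));
        System.out.println(row2.col);  // [87, null]
      }
    }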

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/strict_pruning.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/strict_pruning.q?rev=706704&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/strict_pruning.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientnegative/strict_pruning.q Tue Oct 21 11:11:05 2008
@@ -0,0 +1,6 @@
+set hive.partition.pruning=strict;
+
+EXPLAIN
+SELECT count(1) FROM srcpart;
+
+SELECT count(1) FROM srcpart;

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/case_sensitivity.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/case_sensitivity.q?rev=706704&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/case_sensitivity.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/case_sensitivity.q Tue Oct 21 11:11:05 2008
@@ -0,0 +1,10 @@
+CREATE TABLE DEST1(Key INT, VALUE STRING);
+
+EXPLAIN
+FROM SRC_THRIFT
+INSERT OVERWRITE TABLE dest1 SELECT src_Thrift.LINT[1], src_thrift.lintstring[0].MYSTRING where src_thrift.liNT[0] > 0;
+
+FROM SRC_THRIFT
+INSERT OVERWRITE TABLE dest1 SELECT src_Thrift.LINT[1], src_thrift.lintstring[0].MYSTRING where src_thrift.liNT[0] > 0;
+
+SELECT DEST1.* FROM Dest1;

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby1_limit.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby1_limit.q?rev=706704&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby1_limit.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby1_limit.q Tue Oct 21 11:11:05 2008
@@ -0,0 +1,10 @@
+set mapred.reduce.tasks=31;
+
+CREATE TABLE dest1(key INT, value DOUBLE);
+
+EXPLAIN
+FROM src INSERT OVERWRITE TABLE dest1 SELECT src.key, sum(substr(src.value,4)) GROUP BY src.key LIMIT 5;
+
+FROM src INSERT OVERWRITE TABLE dest1 SELECT src.key, sum(substr(src.value,4)) GROUP BY src.key LIMIT 5;
+
+SELECT dest1.* FROM dest1;

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby2_limit.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby2_limit.q?rev=706704&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby2_limit.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/groupby2_limit.q Tue Oct 21 11:11:05 2008
@@ -0,0 +1,7 @@
+set mapred.reduce.tasks=31;
+
+EXPLAIN
+SELECT src.key, sum(substr(src.value,4)) FROM src GROUP BY src.key LIMIT 5;
+
+SELECT src.key, sum(substr(src.value,4)) FROM src GROUP BY src.key LIMIT 5;
+

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input.q?rev=706704&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input.q Tue Oct 21 11:11:05 2008
@@ -0,0 +1,4 @@
+EXPLAIN
+SELECT x.* FROM SRC x;
+
+SELECT x.* FROM SRC x;

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input11_limit.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input11_limit.q?rev=706704&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input11_limit.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input11_limit.q Tue Oct 21 11:11:05 2008
@@ -0,0 +1,10 @@
+CREATE TABLE dest1(key INT, value STRING);
+
+EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT src.key, src.value WHERE src.key < 100 LIMIT 10;
+
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT src.key, src.value WHERE src.key < 100 LIMIT 10;
+
+SELECT dest1.* FROM dest1;

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input14_limit.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input14_limit.q?rev=706704&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input14_limit.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input14_limit.q Tue Oct 21 11:11:05 2008
@@ -0,0 +1,20 @@
+CREATE TABLE dest1(key INT, value STRING);
+
+EXPLAIN
+FROM (
+  FROM src
+  SELECT TRANSFORM(src.key, src.value) AS (tkey, tvalue) 
+         USING '/bin/cat'
+  CLUSTER BY tkey LIMIT 20
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100;
+
+FROM (
+  FROM src
+  SELECT TRANSFORM(src.key, src.value) AS (tkey, tvalue) 
+         USING '/bin/cat'
+  CLUSTER BY tkey LIMIT 20
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100;
+
+SELECT dest1.* FROM dest1;

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input16.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input16.q?rev=706704&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input16.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input16.q Tue Oct 21 11:11:05 2008
@@ -0,0 +1,5 @@
+-- TestSerDe is a user defined serde where the default delimiter is Ctrl-B
+CREATE TABLE INPUT16(KEY STRING, VALUE STRING) ROW FORMAT SERIALIZER 'org.apache.hadoop.hive.serde2.TestSerDe';
+LOAD DATA LOCAL INPATH '../data/files/kv1_cb.txt' INTO TABLE INPUT16;
+SELECT INPUT16.VALUE, INPUT16.KEY FROM INPUT16;
+DROP TABLE INPUT16;

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input16_cc.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input16_cc.q?rev=706704&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input16_cc.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input16_cc.q Tue Oct 21 11:11:05 2008
@@ -0,0 +1,6 @@
+-- TestSerDe is a user defined serde where the default delimiter is Ctrl-B
+-- the user is overwriting it with ctrlC
+CREATE TABLE INPUT16_CC(KEY STRING, VALUE STRING) ROW FORMAT SERIALIZER 'org.apache.hadoop.hive.serde2.TestSerDe'  with serdeproperties ('testserde.default.serialization.format'='\003', 'dummy.prop.not.used'='dummyy.val');
+LOAD DATA LOCAL INPATH '../data/files/kv1_cc.txt' INTO TABLE INPUT16_CC;
+SELECT INPUT16_CC.VALUE, INPUT16_CC.KEY FROM INPUT16_CC;
+DROP TABLE INPUT16_CC;
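
The serdeproperties override above lands in the separator-parsing branch of TestSerDe.initialize (see TestSerDe.java earlier in this commit): a value that parses as a decimal byte is converted to that single byte, and anything else is used verbatim, so an already-unescaped control character works as well. A minimal standalone sketch of that rule; the class and method names are hypothetical:

    // Hypothetical standalone mirror of the separator rule in
    // TestSerDe.initialize; names are illustrative only.
    public class SeparatorRule {
      static String resolveSeparator(String alt) {
        if (alt == null || alt.length() == 0) {
          return "\002";               // DefaultSeparator: Ctrl-B
        }
        try {
          // A decimal value such as "3" becomes the single byte 0x03 (Ctrl-C).
          return new String(new byte[] { Byte.valueOf(alt).byteValue() });
        } catch (NumberFormatException e) {
          // Non-numeric values (for example a pre-unescaped control
          // character) are taken verbatim.
          return alt;
        }
      }

      public static void main(String[] args) {
        System.out.println((int) resolveSeparator("3").charAt(0));    // 3
        System.out.println((int) resolveSeparator("\003").charAt(0)); // 3 as well
      }
    }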

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input1_limit.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input1_limit.q?rev=706704&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input1_limit.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input1_limit.q Tue Oct 21 11:11:05 2008
@@ -0,0 +1,18 @@
+CREATE TABLE dest1(key INT, value STRING);
+CREATE TABLE dest2(key INT, value STRING);
+
+EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT src.key, src.value WHERE src.key < 100 LIMIT 10
+INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key < 100 LIMIT 5;
+
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT src.key, src.value WHERE src.key < 100 LIMIT 10
+INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key < 100 LIMIT 5;
+
+SELECT dest1.* FROM dest1;
+SELECT dest2.* FROM dest2;
+
+DROP TABLE dest1;
+DROP TABLE dest2;
+

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input2_limit.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input2_limit.q?rev=706704&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input2_limit.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input2_limit.q Tue Oct 21 11:11:05 2008
@@ -0,0 +1,4 @@
+EXPLAIN
+SELECT x.* FROM SRC x WHERE x.key < 300 LIMIT 5;
+
+SELECT x.* FROM SRC x WHERE x.key < 300 LIMIT 5;

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input3.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input3.q?rev=706704&r1=706703&r2=706704&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input3.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input3.q Tue Oct 21 11:11:05 2008
@@ -1,3 +1,6 @@
+DROP TABLE TEST3a;
+DROP TABLE TEST3b;
+DROP TABLE TEST3c;
 CREATE TABLE TEST3a(A INT, B FLOAT); 
 DESCRIBE TEST3a; 
 CREATE TABLE TEST3b(A ARRAY<INT>, B FLOAT, C MAP<FLOAT, INT>); 
@@ -12,5 +15,9 @@
 ALTER TABLE TEST3b RENAME TO TEST3c;
 DESCRIBE TEST3c; 
 SHOW TABLES;
+EXPLAIN
+ALTER TABLE TEST3c REPLACE COLUMNS (R1 INT, R2 FLOAT);
+ALTER TABLE TEST3c REPLACE COLUMNS (R1 INT, R2 FLOAT);
+DESCRIBE EXTENDED TEST3c;
 DROP TABLE TEST3c;
 DROP TABLE TEST3a;

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input4_cb_delim.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input4_cb_delim.q?rev=706704&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input4_cb_delim.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input4_cb_delim.q Tue Oct 21 11:11:05 2008
@@ -0,0 +1,4 @@
+CREATE TABLE INPUT4_CB(KEY STRING, VALUE STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\002' LINES TERMINATED BY '\012';
+LOAD DATA LOCAL INPATH '../data/files/kv1_cb.txt' INTO TABLE INPUT4_CB;
+SELECT INPUT4_CB.VALUE, INPUT4_CB.KEY FROM INPUT4_CB;
+DROP TABLE INPUT4_CB;

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_dfs.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_dfs.q?rev=706704&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_dfs.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_dfs.q Tue Oct 21 11:11:05 2008
@@ -0,0 +1,4 @@
+dfs -cat ../../../../build/contrib/hive/ql/test/data/files/kv1.txt;
+set fs.default.name=file://src
+dfs -ls
+

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_dynamicserde.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_dynamicserde.q?rev=706704&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_dynamicserde.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_dynamicserde.q Tue Oct 21 11:11:05 2008
@@ -0,0 +1,15 @@
+CREATE TABLE dest1(a array<int>, b array<string>, c map<string,string>, d int, e string)
+ROW FORMAT DELIMITED
+FIELDS TERMINATED BY '1'
+COLLECTION ITEMS TERMINATED BY '2'
+MAP KEYS TERMINATED BY '3'
+LINES TERMINATED BY '10';
+
+EXPLAIN
+FROM src_thrift
+INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint, src_thrift.lstring, src_thrift.mstringstring, src_thrift.aint, src_thrift.astring;
+
+FROM src_thrift
+INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint, src_thrift.lstring, src_thrift.mstringstring, src_thrift.aint, src_thrift.astring;
+
+SELECT dest1.* FROM dest1;

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_limit.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_limit.q?rev=706704&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_limit.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_limit.q Tue Oct 21 11:11:05 2008
@@ -0,0 +1,4 @@
+EXPLAIN
+SELECT x.* FROM SRC x LIMIT 20;
+
+SELECT x.* FROM SRC x LIMIT 20;

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_part1.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_part1.q?rev=706704&r1=706703&r2=706704&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_part1.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_part1.q Tue Oct 21 11:11:05 2008
@@ -1,6 +1,6 @@
 CREATE TABLE dest1(key INT, value STRING, hr STRING, ds STRING);
 
-EXPLAIN
+EXPLAIN EXTENDED
 FROM srcpart
 INSERT OVERWRITE TABLE dest1 SELECT srcpart.key, srcpart.value, srcpart.hr, srcpart.ds WHERE srcpart.key < 100 and srcpart.ds = '2008-04-08' and srcpart.hr = '12';
 

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_part2.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_part2.q?rev=706704&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_part2.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_part2.q Tue Oct 21 11:11:05 2008
@@ -0,0 +1,16 @@
+CREATE TABLE dest1(key INT, value STRING, hr STRING, ds STRING);
+CREATE TABLE dest2(key INT, value STRING, hr STRING, ds STRING);
+
+EXPLAIN EXTENDED
+FROM srcpart
+INSERT OVERWRITE TABLE dest1 SELECT srcpart.key, srcpart.value, srcpart.hr, srcpart.ds WHERE srcpart.key < 100 and srcpart.ds = '2008-04-08' and srcpart.hr = '12'
+INSERT OVERWRITE TABLE dest2 SELECT srcpart.key, srcpart.value, srcpart.hr, srcpart.ds WHERE srcpart.key < 100 and srcpart.ds = '2008-04-09' and srcpart.hr = '12';
+
+FROM srcpart
+INSERT OVERWRITE TABLE dest1 SELECT srcpart.key, srcpart.value, srcpart.hr, srcpart.ds WHERE srcpart.key < 100 and srcpart.ds = '2008-04-08' and srcpart.hr = '12'
+INSERT OVERWRITE TABLE dest2 SELECT srcpart.key, srcpart.value, srcpart.hr, srcpart.ds WHERE srcpart.key < 100 and srcpart.ds = '2008-04-09' and srcpart.hr = '12';
+
+SELECT dest1.* FROM dest1;
+SELECT dest2.* FROM dest2;
+
+drop table dest2;

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_testxpath.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_testxpath.q?rev=706704&r1=706703&r2=706704&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_testxpath.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_testxpath.q Tue Oct 21 11:11:05 2008
@@ -1,10 +1,10 @@
-CREATE TABLE dest1(key INT, value STRING);
+CREATE TABLE dest1(key INT, value STRING, mapvalue STRING);
 
 EXPLAIN
 FROM src_thrift
-INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint[1], src_thrift.lintstring[0].mystring;
+INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint[1], src_thrift.lintstring[0].mystring, src_thrift.mstringstring['key_2'];
 
 FROM src_thrift
-INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint[1], src_thrift.lintstring[0].mystring;
+INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint[1], src_thrift.lintstring[0].mystring, src_thrift.mstringstring['key_2'];
 
 SELECT dest1.* FROM dest1;

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_testxpath2.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_testxpath2.q?rev=706704&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_testxpath2.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_testxpath2.q Tue Oct 21 11:11:05 2008
@@ -0,0 +1,10 @@
+CREATE TABLE dest1(lint_size INT, lintstring_size INT, mstringstring_size INT);
+
+EXPLAIN
+FROM src_thrift
+INSERT OVERWRITE TABLE dest1 SELECT size(src_thrift.lint), size(src_thrift.lintstring), size(src_thrift.mstringstring) where src_thrift.lint IS NOT NULL AND NOT (src_thrift.mstringstring IS NULL);
+
+FROM src_thrift
+INSERT OVERWRITE TABLE dest1 SELECT size(src_thrift.lint), size(src_thrift.lintstring), size(src_thrift.mstringstring) where src_thrift.lint IS NOT NULL AND NOT (src_thrift.mstringstring IS NULL);
+
+SELECT dest1.* FROM dest1;

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_testxpath3.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_testxpath3.q?rev=706704&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_testxpath3.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/input_testxpath3.q Tue Oct 21 11:11:05 2008
@@ -0,0 +1,6 @@
+EXPLAIN
+FROM src_thrift
+SELECT src_thrift.mstringstring['key_9'], src_thrift.lintstring.myint;
+
+FROM src_thrift
+SELECT src_thrift.mstringstring['key_9'], src_thrift.lintstring.myint;

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/inputddl4.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/inputddl4.q?rev=706704&r1=706703&r2=706704&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/inputddl4.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/inputddl4.q Tue Oct 21 11:11:05 2008
@@ -1,4 +1,5 @@
 -- a simple test to test sorted/clustered syntax
+DROP TABLE INPUTDDL4;
 CREATE TABLE INPUTDDL4(viewTime DATETIME, userid INT,
                        page_url STRING, referrer_url STRING, 
                        friends ARRAY<BIGINT>, properties MAP<STRING, STRING>,
@@ -7,4 +8,5 @@
     PARTITIONED BY(ds DATETIME, country STRING) 
     CLUSTERED BY(userid) SORTED BY(viewTime) INTO 32 BUCKETS;
 DESCRIBE INPUTDDL4;
+DESCRIBE EXTENDED INPUTDDL4;
 DROP TABLE INPUTDDL4;

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/inputddl6.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/inputddl6.q?rev=706704&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/inputddl6.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/inputddl6.q Tue Oct 21 11:11:05 2008
@@ -0,0 +1,16 @@
+-- test for describe extended table
+-- test for describe extended table partition
+-- test for alter table drop partition
+DROP TABLE INPUTDDL6;
+CREATE TABLE INPUTDDL6(KEY STRING, VALUE STRING) PARTITIONED BY(ds DATETIME);
+LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUTDDL6 PARTITION (ds='2008-04-09');
+LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUTDDL6 PARTITION (ds='2008-04-08');
+DESCRIBE EXTENDED INPUTDDL6;
+DESCRIBE EXTENDED INPUTDDL6 PARTITION (ds='2008-04-08');
+SHOW PARTITIONS INPUTDDL6;
+ALTER TABLE INPUTDDL6 DROP PARTITION (ds='2008-04-08');
+SHOW PARTITIONS INPUTDDL6;
+DROP TABLE INPUTDDL6;
+EXPLAIN
+DESCRIBE EXTENDED INPUTDDL6 PARTITION (ds='2008-04-09');
+

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join9.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join9.q?rev=706704&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join9.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/join9.q Tue Oct 21 11:11:05 2008
@@ -0,0 +1,10 @@
+CREATE TABLE dest1(key INT, value STRING);
+
+EXPLAIN EXTENDED
+FROM srcpart src1 JOIN src src2 ON (src1.key = src2.key)
+INSERT OVERWRITE TABLE dest1 SELECT src1.key, src2.value where src1.ds = '2008-04-08' and src1.hr = '12';
+
+FROM srcpart src1 JOIN src src2 ON (src1.key = src2.key)
+INSERT OVERWRITE TABLE dest1 SELECT src1.key, src2.value where src1.ds = '2008-04-08' and src1.hr = '12';
+
+SELECT dest1.* FROM dest1;

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/nullinput.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/nullinput.q?rev=706704&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/nullinput.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/nullinput.q Tue Oct 21 11:11:05 2008
@@ -0,0 +1,4 @@
+create table tstnullinut(a string, b string);
+select x.* from tstnullinut x;
+select x.a, count(1) from tstnullinut x group by x.a;
+drop table tstnullinut;

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/show_tables.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/show_tables.q?rev=706704&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/show_tables.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/show_tables.q Tue Oct 21 11:11:05 2008
@@ -0,0 +1,15 @@
+CREATE TABLE shtb_test1(KEY STRING, VALUE STRING) PARTITIONED BY(ds DATETIME);
+CREATE TABLE shtb_test2(KEY STRING, VALUE STRING) PARTITIONED BY(ds DATETIME);
+
+EXPLAIN
+SHOW TABLES 'shtb_*';
+
+SHOW TABLES 'shtb_*';
+
+EXPLAIN
+SHOW TABLES 'shtb_test1|shtb_test2';
+
+SHOW TABLES 'shtb_test1|shtb_test2';
+
+DROP TABLE shtb_test1;
+DROP TABLE shtb_test2;

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/showparts.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/showparts.q?rev=706704&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/showparts.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/showparts.q Tue Oct 21 11:11:05 2008
@@ -0,0 +1,4 @@
+EXPLAIN
+SHOW PARTITIONS srcpart; 
+
+SHOW PARTITIONS srcpart;

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/subq2.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/subq2.q?rev=706704&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/subq2.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/subq2.q Tue Oct 21 11:11:05 2008
@@ -0,0 +1,8 @@
+EXPLAIN
+SELECT a.k, a.c
+FROM (SELECT b.key as k, count(1) as c FROM src b GROUP BY b.key) a
+WHERE a.k >= 90;
+
+SELECT a.k, a.c
+FROM (SELECT b.key as k, count(1) as c FROM src b GROUP BY b.key) a
+WHERE a.k >= 90;

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/udf2.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/udf2.q?rev=706704&r1=706703&r2=706704&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/udf2.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/udf2.q Tue Oct 21 11:11:05 2008
@@ -1,10 +1,8 @@
-EXPLAIN
-CREATE TEMPORARY FUNCTION testlength AS 'org.apache.hadoop.hive.ql.udf.UDFTestLength';
-
-CREATE TEMPORARY FUNCTION testlength AS 'org.apache.hadoop.hive.ql.udf.UDFTestLength';
+CREATE TABLE dest1(c1 STRING);
 
-CREATE TABLE dest1(len INT);
+FROM src INSERT OVERWRITE TABLE dest1 SELECT '  abc  ' WHERE src.key = 86;
 
-FROM src INSERT OVERWRITE TABLE dest1 SELECT testlength(src.value);
+EXPLAIN
+SELECT '|', trim(dest1.c1), '|', rtrim(dest1.c1), '|', ltrim(dest1.c1), '|' FROM dest1;
 
-SELECT dest1.* FROM dest1;
+SELECT '|', trim(dest1.c1), '|', rtrim(dest1.c1), '|', ltrim(dest1.c1), '|' FROM dest1;

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/udf3.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/udf3.q?rev=706704&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/udf3.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/udf3.q Tue Oct 21 11:11:05 2008
@@ -0,0 +1,10 @@
+CREATE TABLE dest1(c1 STRING, c2 STRING, c3 STRING, c4 STRING, c5 STRING);
+
+EXPLAIN
+FROM src INSERT OVERWRITE TABLE dest1 SELECT count(CAST('' AS INT)), sum(CAST('' AS INT)), avg(CAST('' AS INT)), 
+min(CAST('' AS INT)), max(CAST('' AS INT));
+
+FROM src INSERT OVERWRITE TABLE dest1 SELECT count(CAST('' AS INT)), sum(CAST('' AS INT)), avg(CAST('' AS INT)), 
+min(CAST('' AS INT)), max(CAST('' AS INT));
+
+SELECT dest1.* FROM dest1;

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/udf4.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/udf4.q?rev=706704&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/udf4.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/clientpositive/udf4.q Tue Oct 21 11:11:05 2008
@@ -0,0 +1,8 @@
+CREATE TABLE dest1(c1 STRING);
+
+FROM src INSERT OVERWRITE TABLE dest1 SELECT '  abc  ' WHERE src.key = 86;
+
+EXPLAIN
+SELECT round(1.0), round(1.5), round(-1.5), floor(1.0), floor(1.5), floor(-1.5), ceil(1.0), ceil(1.5), ceil(-1.5), ceiling(1.0), rand(3), +3, -3, 1++2, 1+-2, ~1 FROM dest1;
+
+SELECT round(1.0), round(1.5), round(-1.5), floor(1.0), floor(1.5), floor(-1.5), ceil(1.0), ceil(1.5), ceil(-1.5), ceiling(1.0), rand(3), +3, -3, 1++2, 1+-2, ~1 FROM dest1;

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/negative/invalid_dot.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/negative/invalid_dot.q?rev=706704&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/negative/invalid_dot.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/negative/invalid_dot.q Tue Oct 21 11:11:05 2008
@@ -0,0 +1,2 @@
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT src.value.member WHERE src.key < 100

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/negative/invalid_index.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/negative/invalid_index.q?rev=706704&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/negative/invalid_index.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/negative/invalid_index.q Tue Oct 21 11:11:05 2008
@@ -0,0 +1,2 @@
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT src.key[0], src.value

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/negative/invalid_list_index.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/negative/invalid_list_index.q?rev=706704&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/negative/invalid_list_index.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/negative/invalid_list_index.q Tue Oct 21 11:11:05 2008
@@ -0,0 +1,2 @@
+FROM src_thrift
+INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint[0], src_thrift.lstring['abc']

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/negative/invalid_list_index2.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/negative/invalid_list_index2.q?rev=706704&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/negative/invalid_list_index2.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/negative/invalid_list_index2.q Tue Oct 21 11:11:05 2008
@@ -0,0 +1,2 @@
+FROM src_thrift
+INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint[0], src_thrift.lstring[1 + 2]

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/negative/invalid_map_index.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/negative/invalid_map_index.q?rev=706704&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/negative/invalid_map_index.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/negative/invalid_map_index.q Tue Oct 21 11:11:05 2008
@@ -0,0 +1,2 @@
+FROM src_thrift
+INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint[0], src_thrift.mstringstring[0]

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/negative/invalid_map_index2.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/negative/invalid_map_index2.q?rev=706704&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/negative/invalid_map_index2.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/negative/invalid_map_index2.q Tue Oct 21 11:11:05 2008
@@ -0,0 +1,2 @@
+FROM src_thrift
+INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint[0], src_thrift.mstringstring[concat('abc', 'abc')]

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/case_sensitivity.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/case_sensitivity.q?rev=706704&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/case_sensitivity.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/case_sensitivity.q Tue Oct 21 11:11:05 2008
@@ -0,0 +1,2 @@
+FROM SRC_THRIFT
+INSERT OVERWRITE TABLE dest1 SELECT src_Thrift.LINT[1], src_thrift.lintstring[0].MYSTRING where src_thrift.liNT[0] > 0

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/input_testxpath.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/input_testxpath.q?rev=706704&r1=706703&r2=706704&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/input_testxpath.q (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/input_testxpath.q Tue Oct 21 11:11:05 2008
@@ -1,2 +1,2 @@
 FROM src_thrift
-INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint[1], src_thrift.lintstring[0].mystring
+INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint[1], src_thrift.lintstring[0].mystring, src_thrift.mstringstring['key_2']

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/input_testxpath2.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/input_testxpath2.q?rev=706704&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/input_testxpath2.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/input_testxpath2.q Tue Oct 21 11:11:05 2008
@@ -0,0 +1,2 @@
+FROM src_thrift
+INSERT OVERWRITE TABLE dest1 SELECT size(src_thrift.lint), size(src_thrift.lintstring), size(src_thrift.mstringstring) where src_thrift.lint IS NOT NULL AND NOT (src_thrift.mstringstring IS NULL)

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/udf4.q
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/udf4.q?rev=706704&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/udf4.q (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/queries/positive/udf4.q Tue Oct 21 11:11:05 2008
@@ -0,0 +1 @@
+SELECT round(1.0), round(1.5), round(-1.5), floor(1.0), floor(1.5), floor(-1.5), ceil(1.0), ceil(1.5), ceil(-1.5), ceiling(1.0), rand(3), +3, -3, 1++2, 1+-2, ~1 FROM dest1

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/strict_pruning.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/strict_pruning.q.out?rev=706704&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/strict_pruning.q.out (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientnegative/strict_pruning.q.out Tue Oct 21 11:11:05 2008
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: line 2:7 No Partition Predicate Found 1:  for Alias srcpart Table srcpart

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/case_sensitivity.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/case_sensitivity.q.out?rev=706704&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/case_sensitivity.q.out (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/case_sensitivity.q.out Tue Oct 21 11:11:05 2008
@@ -0,0 +1,49 @@
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF SRC_THRIFT)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR ([ (TOK_COLREF src_Thrift LINT) 1)) (TOK_SELEXPR (. ([ (TOK_COLREF src_thrift lintstring) 0) MYSTRING))) (TOK_WHERE (> ([ (TOK_COLREF src_thrift liNT) 0) 0))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src_thrift 
+            Filter Operator
+              predicate:
+                  expr: (lint[0] > 0)
+                  type: Boolean
+              Select Operator
+                expressions:
+                      expr: lint[1]
+                      type: int
+                      expr: lintstring[0].MYSTRING
+                      type: string
+                File Output Operator
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                      name: dest1
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+            replace:
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                name: dest1
+
+
+2	1
+4	8
+6	27
+8	64
+10	125
+12	216
+14	343
+16	512
+18	729

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/cast1.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/cast1.q.out?rev=706704&r1=706703&r2=706704&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/cast1.q.out (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/cast1.q.out Tue Oct 21 11:11:05 2008
@@ -32,21 +32,20 @@
                       type: int
                 File Output Operator
                   table:
-                      name: dest1
-                      serde: simple_meta
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                      name: dest1
 
   Stage: Stage-0
     Move Operator
       tables:
+            replace:
             table:
-                name: dest1
-                serde: simple_meta
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-            replace:
+                serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                name: dest1
 
 
 5	5.0	5.0	5.0	5	false	1
-

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby1.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby1.q.out?rev=706704&r1=706703&r2=706704&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby1.q.out (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby1.q.out Tue Oct 21 11:11:05 2008
@@ -9,46 +9,51 @@
 STAGE PLANS:
   Stage: Stage-1
     Map Reduce
-      Reduce Operator Tree:
-          Group By Operator
-            keys:
-                  expr: VALUE.2
-                  type: string
-            mode: partial1
-          
-                expr: sum(VALUE.3)
-            File Output Operator
-              table:
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
       Alias -> Map Operator Tree:
         src 
             Reduce Output Operator
-              tag: -1
               key expressions:
-                    expr: 0
-                    type: int
-              value expressions:
-                    expr: key
-                    type: string
-                    expr: value
-                    type: string
                     expr: key
                     type: string
+              # partition fields: -1
+              tag: -1
+              value expressions:
                     expr: substr(value, 4)
                     type: string
-              # partition fields: -1
+      Reduce Operator Tree:
+        Group By Operator
+        
+              expr: sum(VALUE.0)
+          keys:
+                expr: KEY.0
+                type: string
+          mode: partial1
+          File Output Operator
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
 
   Stage: Stage-2
     Map Reduce
+      Alias -> Map Operator Tree:
+        /tmp/hive-njain/748679827/1407352694.10001 
+          Reduce Output Operator
+            key expressions:
+                  expr: 0
+                  type: string
+            # partition fields: 1
+            tag: -1
+            value expressions:
+                  expr: 1
+                  type: string
       Reduce Operator Tree:
         Group By Operator
+        
+              expr: sum(VALUE.0)
           keys:
                 expr: KEY.0
                 type: string
           mode: partial2
-        
-              expr: sum(VALUE.0)
           Select Operator
             expressions:
                   expr: 0
@@ -57,31 +62,20 @@
                   type: string
             File Output Operator
               table:
-                  name: dest1
-                  serde: simple_meta
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-      Alias -> Map Operator Tree:
-        /tmp/hive-zshao/178504461.10001 
-          Reduce Output Operator
-            tag: -1
-            key expressions:
-                  expr: 0
-                  type: string
-            value expressions:
-                  expr: 1
-                  type: string
-            # partition fields: 1
+                  serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                  name: dest1
 
   Stage: Stage-0
     Move Operator
       tables:
+            replace:
             table:
-                name: dest1
-                serde: simple_meta
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-            replace:
+                serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                name: dest1
 
 
 0	0.0
@@ -393,4 +387,3 @@
 96	96.0
 97	194.0
 98	196.0
-

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby1_limit.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby1_limit.q.out?rev=706704&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby1_limit.q.out (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby1_limit.q.out Tue Oct 21 11:11:05 2008
@@ -0,0 +1,86 @@
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src key)) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION substr (TOK_COLREF src value) 4)))) (TOK_GROUPBY (TOK_COLREF src key)) (TOK_LIMIT 5)))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src 
+            Reduce Output Operator
+              key expressions:
+                    expr: key
+                    type: string
+              # partition fields: -1
+              tag: -1
+              value expressions:
+                    expr: substr(value, 4)
+                    type: string
+      Reduce Operator Tree:
+        Group By Operator
+        
+              expr: sum(VALUE.0)
+          keys:
+                expr: KEY.0
+                type: string
+          mode: partial1
+          File Output Operator
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+
+  Stage: Stage-2
+    Map Reduce
+      Alias -> Map Operator Tree:
+        /tmp/hive-njain/7427260/341902671.10001 
+          Reduce Output Operator
+            key expressions:
+                  expr: 0
+                  type: string
+            # partition fields: 1
+            tag: -1
+            value expressions:
+                  expr: 1
+                  type: string
+      Reduce Operator Tree:
+        Group By Operator
+        
+              expr: sum(VALUE.0)
+          keys:
+                expr: KEY.0
+                type: string
+          mode: partial2
+          Select Operator
+            expressions:
+                  expr: 0
+                  type: string
+                  expr: 1
+                  type: string
+            Limit
+              File Output Operator
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                    name: dest1
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+            replace:
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                name: dest1
+
+
+0	0.0
+10	10.0
+100	200.0
+103	206.0
+104	208.0

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby2.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby2.q.out?rev=706704&r1=706703&r2=706704&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby2.q.out (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby2.q.out Tue Oct 21 11:11:05 2008
@@ -9,46 +9,54 @@
 STAGE PLANS:
   Stage: Stage-1
     Map Reduce
-      Reduce Operator Tree:
-          Group By Operator
-            keys:
-                  expr: VALUE.2
-                  type: string
-            mode: partial1
-          
-                expr: count(DISTINCT KEY.0)
-                expr: sum(KEY.0)
-            File Output Operator
-              table:
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
       Alias -> Map Operator Tree:
         src 
             Reduce Output Operator
-              tag: -1
               key expressions:
-                    expr: substr(value, 4)
-                    type: string
-              value expressions:
-                    expr: key
-                    type: string
-                    expr: value
-                    type: string
                     expr: substr(key, 0, 1)
                     type: string
-              # partition fields: 1
+                    expr: substr(value, 4)
+                    type: string
+              # partition fields: 2147483647
+              tag: -1
+      Reduce Operator Tree:
+        Group By Operator
+        
+              expr: count(DISTINCT KEY.1)
+              expr: sum(KEY.1)
+          keys:
+                expr: KEY.0
+                type: string
+          mode: partial1
+          File Output Operator
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
 
   Stage: Stage-2
     Map Reduce
+      Alias -> Map Operator Tree:
+        /tmp/hive-njain/307368091/808162418.10001 
+          Reduce Output Operator
+            key expressions:
+                  expr: 0
+                  type: string
+            # partition fields: 1
+            tag: -1
+            value expressions:
+                  expr: 1
+                  type: string
+                  expr: 2
+                  type: string
       Reduce Operator Tree:
         Group By Operator
+        
+              expr: count(VALUE.0)
+              expr: sum(VALUE.1)
           keys:
                 expr: KEY.0
                 type: string
           mode: partial2
-        
-              expr: count(VALUE.0)
-              expr: sum(VALUE.1)
           Select Operator
             expressions:
                   expr: 0
@@ -59,33 +67,20 @@
                   type: string
             File Output Operator
               table:
-                  name: dest1
-                  serde: simple_meta
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-      Alias -> Map Operator Tree:
-        /tmp/hive-zshao/29356866.10001 
-          Reduce Output Operator
-            tag: -1
-            key expressions:
-                  expr: 0
-                  type: string
-            value expressions:
-                  expr: 1
-                  type: string
-                  expr: 2
-                  type: string
-            # partition fields: 1
+                  serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                  name: dest1
 
   Stage: Stage-0
     Move Operator
       tables:
+            replace:
             table:
-                name: dest1
-                serde: simple_meta
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-            replace:
+                serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                name: dest1
 
 
 0	1	00.0
@@ -98,4 +93,3 @@
 7	6	7735.0
 8	8	8762.0
 9	7	91047.0
-

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby2_limit.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby2_limit.q.out?rev=706704&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby2_limit.q.out (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby2_limit.q.out Tue Oct 21 11:11:05 2008
@@ -0,0 +1,78 @@
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_COLREF src key)) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION substr (TOK_COLREF src value) 4)))) (TOK_GROUPBY (TOK_COLREF src key)) (TOK_LIMIT 5)))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src 
+            Reduce Output Operator
+              key expressions:
+                    expr: key
+                    type: string
+              # partition fields: -1
+              tag: -1
+              value expressions:
+                    expr: substr(value, 4)
+                    type: string
+      Reduce Operator Tree:
+        Group By Operator
+        
+              expr: sum(VALUE.0)
+          keys:
+                expr: KEY.0
+                type: string
+          mode: partial1
+          File Output Operator
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+
+  Stage: Stage-2
+    Map Reduce
+      Alias -> Map Operator Tree:
+        /tmp/hive-njain/606295988/175965730.10002 
+          Reduce Output Operator
+            key expressions:
+                  expr: 0
+                  type: string
+            # partition fields: 1
+            tag: -1
+            value expressions:
+                  expr: 1
+                  type: string
+      Reduce Operator Tree:
+        Group By Operator
+        
+              expr: sum(VALUE.0)
+          keys:
+                expr: KEY.0
+                type: string
+          mode: partial2
+          Select Operator
+            expressions:
+                  expr: 0
+                  type: string
+                  expr: 1
+                  type: string
+            Limit
+              File Output Operator
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 5
+
+
+0	0.0
+10	10.0
+100	200.0
+103	206.0
+104	208.0
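
For reference, the abstract syntax tree at the top of the new groupby2_limit.q.out corresponds to a HiveQL query of roughly this shape (a reconstruction from the AST shown in the hunk, not the literal .q file, whose formatting may differ):

    SELECT src.key, sum(substr(src.value, 4))
    FROM src
    GROUP BY src.key
    LIMIT 5

The plan above realizes it in three stages: Stage-1 computes a partial aggregation per reducer (mode: partial1), Stage-2 merges the partials (mode: partial2) and applies the Limit operator, and Stage-0 fetches the result with limit 5.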

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby3.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby3.q.out?rev=706704&r1=706703&r2=706704&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby3.q.out (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby3.q.out Tue Oct 21 11:11:05 2008
@@ -9,44 +9,55 @@
 STAGE PLANS:
   Stage: Stage-1
     Map Reduce
-      Reduce Operator Tree:
-          Group By Operator
-            mode: partial1
-          
-                expr: avg(DISTINCT KEY.0)
-                expr: sum(KEY.0)
-                expr: avg(KEY.0)
-                expr: min(KEY.0)
-                expr: max(KEY.0)
-            File Output Operator
-              table:
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
       Alias -> Map Operator Tree:
         src 
             Reduce Output Operator
-              tag: -1
               key expressions:
                     expr: substr(value, 4)
                     type: string
-              value expressions:
-                    expr: key
-                    type: string
-                    expr: value
-                    type: string
-              # partition fields: 1
+              # partition fields: 2147483647
+              tag: -1
+      Reduce Operator Tree:
+        Group By Operator
+        
+              expr: avg(DISTINCT KEY.0)
+              expr: sum(KEY.0)
+              expr: avg(KEY.0)
+              expr: min(KEY.0)
+              expr: max(KEY.0)
+          mode: partial1
+          File Output Operator
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
 
   Stage: Stage-2
     Map Reduce
+      Alias -> Map Operator Tree:
+        /tmp/hive-njain/629715569/118113569.10001 
+          Reduce Output Operator
+            # partition fields: 0
+            tag: -1
+            value expressions:
+                  expr: 0
+                  type: string
+                  expr: 1
+                  type: string
+                  expr: 2
+                  type: string
+                  expr: 3
+                  type: string
+                  expr: 4
+                  type: string
       Reduce Operator Tree:
         Group By Operator
-          mode: partial2
         
               expr: avg(VALUE.0)
               expr: sum(VALUE.1)
               expr: avg(VALUE.2)
               expr: min(VALUE.3)
               expr: max(VALUE.4)
+          mode: partial2
           Select Operator
             expressions:
                   expr: 1
@@ -61,37 +72,20 @@
                   type: string
             File Output Operator
               table:
-                  name: dest1
-                  serde: simple_meta
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-      Alias -> Map Operator Tree:
-        /tmp/hive-zshao/52918796.10001 
-          Reduce Output Operator
-            tag: -1
-            value expressions:
-                  expr: 0
-                  type: string
-                  expr: 1
-                  type: string
-                  expr: 2
-                  type: string
-                  expr: 3
-                  type: string
-                  expr: 4
-                  type: string
-            # partition fields: 0
+                  serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                  name: dest1
 
   Stage: Stage-0
     Move Operator
       tables:
+            replace:
             table:
-                name: dest1
-                serde: simple_meta
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-            replace:
+                serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                name: dest1
 
 
 130091.0	260.182	256.10355987055016	498.0	0.0
-

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby4.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby4.q.out?rev=706704&r1=706703&r2=706704&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby4.q.out (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby4.q.out Tue Oct 21 11:11:05 2008
@@ -9,34 +9,35 @@
 STAGE PLANS:
   Stage: Stage-1
     Map Reduce
-      Reduce Operator Tree:
-          Group By Operator
-            keys:
-                  expr: VALUE.2
-                  type: string
-            mode: partial1
-            File Output Operator
-              table:
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
       Alias -> Map Operator Tree:
         src 
             Reduce Output Operator
-              tag: -1
               key expressions:
-                    expr: 0
-                    type: int
-              value expressions:
-                    expr: key
-                    type: string
-                    expr: value
-                    type: string
                     expr: substr(key, 0, 1)
                     type: string
               # partition fields: -1
+              tag: -1
+      Reduce Operator Tree:
+        Group By Operator
+          keys:
+                expr: KEY.0
+                type: string
+          mode: partial1
+          File Output Operator
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
 
   Stage: Stage-2
     Map Reduce
+      Alias -> Map Operator Tree:
+        /tmp/hive-njain/1561965178/525265780.10001 
+          Reduce Output Operator
+            key expressions:
+                  expr: 0
+                  type: string
+            # partition fields: 1
+            tag: -1
       Reduce Operator Tree:
         Group By Operator
           keys:
@@ -49,28 +50,20 @@
                   type: string
             File Output Operator
               table:
-                  name: dest1
-                  serde: simple_meta
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-      Alias -> Map Operator Tree:
-        /tmp/hive-zshao/64182502.10001 
-          Reduce Output Operator
-            tag: -1
-            key expressions:
-                  expr: 0
-                  type: string
-            # partition fields: 1
+                  serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                  name: dest1
 
   Stage: Stage-0
     Move Operator
       tables:
+            replace:
             table:
-                name: dest1
-                serde: simple_meta
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-            replace:
+                serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                name: dest1
 
 
 0
@@ -83,4 +76,3 @@
 7
 8
 9
-

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby5.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby5.q.out?rev=706704&r1=706703&r2=706704&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby5.q.out (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby5.q.out Tue Oct 21 11:11:05 2008
@@ -9,46 +9,51 @@
 STAGE PLANS:
   Stage: Stage-1
     Map Reduce
-      Reduce Operator Tree:
-          Group By Operator
-            keys:
-                  expr: VALUE.2
-                  type: string
-            mode: partial1
-          
-                expr: sum(VALUE.3)
-            File Output Operator
-              table:
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
       Alias -> Map Operator Tree:
         src 
             Reduce Output Operator
-              tag: -1
               key expressions:
-                    expr: 0
-                    type: int
-              value expressions:
-                    expr: key
-                    type: string
-                    expr: value
-                    type: string
                     expr: key
                     type: string
+              # partition fields: -1
+              tag: -1
+              value expressions:
                     expr: substr(value, 4)
                     type: string
-              # partition fields: -1
+      Reduce Operator Tree:
+        Group By Operator
+        
+              expr: sum(VALUE.0)
+          keys:
+                expr: KEY.0
+                type: string
+          mode: partial1
+          File Output Operator
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
 
   Stage: Stage-2
     Map Reduce
+      Alias -> Map Operator Tree:
+        /tmp/hive-njain/888102295/1013886705.10001 
+          Reduce Output Operator
+            key expressions:
+                  expr: 0
+                  type: string
+            # partition fields: 1
+            tag: -1
+            value expressions:
+                  expr: 1
+                  type: string
       Reduce Operator Tree:
         Group By Operator
+        
+              expr: sum(VALUE.0)
           keys:
                 expr: KEY.0
                 type: string
           mode: partial2
-        
-              expr: sum(VALUE.0)
           Select Operator
             expressions:
                   expr: 0
@@ -57,31 +62,20 @@
                   type: string
             File Output Operator
               table:
-                  name: dest1
-                  serde: simple_meta
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-      Alias -> Map Operator Tree:
-        /tmp/hive-zshao/317976905.10001 
-          Reduce Output Operator
-            tag: -1
-            key expressions:
-                  expr: 0
-                  type: string
-            value expressions:
-                  expr: 1
-                  type: string
-            # partition fields: 1
+                  serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                  name: dest1
 
   Stage: Stage-0
     Move Operator
       tables:
+            replace:
             table:
-                name: dest1
-                serde: simple_meta
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-            replace:
+                serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                name: dest1
 
 
 0	0.0
@@ -393,4 +387,3 @@
 96	96.0
 97	194.0
 98	196.0
-

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby6.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby6.q.out?rev=706704&r1=706703&r2=706704&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby6.q.out (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby6.q.out Tue Oct 21 11:11:05 2008
@@ -9,34 +9,35 @@
 STAGE PLANS:
   Stage: Stage-1
     Map Reduce
-      Reduce Operator Tree:
-          Group By Operator
-            keys:
-                  expr: VALUE.2
-                  type: string
-            mode: partial1
-            File Output Operator
-              table:
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
       Alias -> Map Operator Tree:
         src 
             Reduce Output Operator
-              tag: -1
               key expressions:
-                    expr: 0
-                    type: int
-              value expressions:
-                    expr: key
-                    type: string
-                    expr: value
-                    type: string
                     expr: substr(value, 4, 1)
                     type: string
               # partition fields: -1
+              tag: -1
+      Reduce Operator Tree:
+        Group By Operator
+          keys:
+                expr: KEY.0
+                type: string
+          mode: partial1
+          File Output Operator
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
 
   Stage: Stage-2
     Map Reduce
+      Alias -> Map Operator Tree:
+        /tmp/hive-njain/256745338/35530060.10001 
+          Reduce Output Operator
+            key expressions:
+                  expr: 0
+                  type: string
+            # partition fields: 1
+            tag: -1
       Reduce Operator Tree:
         Group By Operator
           keys:
@@ -49,28 +50,20 @@
                   type: string
             File Output Operator
               table:
-                  name: dest1
-                  serde: simple_meta
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-      Alias -> Map Operator Tree:
-        /tmp/hive-zshao/286498248.10001 
-          Reduce Output Operator
-            tag: -1
-            key expressions:
-                  expr: 0
-                  type: string
-            # partition fields: 1
+                  serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                  name: dest1
 
   Stage: Stage-0
     Move Operator
       tables:
+            replace:
             table:
-                name: dest1
-                serde: simple_meta
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-            replace:
+                serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                name: dest1
 
 
 0
@@ -83,4 +76,3 @@
 7
 8
 9
-

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby7.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby7.q.out?rev=706704&r1=706703&r2=706704&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby7.q.out (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby7.q.out Tue Oct 21 11:11:05 2008
@@ -307,7 +307,6 @@
 96	96.0
 97	194.0
 98	196.0
-
 0	0.0
 10	10.0
 100	200.0
@@ -617,4 +616,3 @@
 96	96.0
 97	194.0
 98	196.0
-

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby8.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby8.q.out?rev=706704&r1=706703&r2=706704&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby8.q.out (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/groupby8.q.out Tue Oct 21 11:11:05 2008
@@ -307,7 +307,6 @@
 96	1
 97	1
 98	1
-
 0	1
 10	1
 100	1
@@ -617,4 +616,3 @@
 96	1
 97	1
 98	1
-

Added: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input.q.out?rev=706704&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input.q.out (added)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input.q.out Tue Oct 21 11:11:05 2008
@@ -0,0 +1,512 @@
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF SRC x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF x)))))
+
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+238	val_238
+86	val_86
+311	val_311
+27	val_27
+165	val_165
+409	val_409
+255	val_255
+278	val_278
+98	val_98
+484	val_484
+265	val_265
+193	val_193
+401	val_401
+150	val_150
+273	val_273
+224	val_224
+369	val_369
+66	val_66
+128	val_128
+213	val_213
+146	val_146
+406	val_406
+429	val_429
+374	val_374
+152	val_152
+469	val_469
+145	val_145
+495	val_495
+37	val_37
+327	val_327
+281	val_281
+277	val_277
+209	val_209
+15	val_15
+82	val_82
+403	val_403
+166	val_166
+417	val_417
+430	val_430
+252	val_252
+292	val_292
+219	val_219
+287	val_287
+153	val_153
+193	val_193
+338	val_338
+446	val_446
+459	val_459
+394	val_394
+237	val_237
+482	val_482
+174	val_174
+413	val_413
+494	val_494
+207	val_207
+199	val_199
+466	val_466
+208	val_208
+174	val_174
+399	val_399
+396	val_396
+247	val_247
+417	val_417
+489	val_489
+162	val_162
+377	val_377
+397	val_397
+309	val_309
+365	val_365
+266	val_266
+439	val_439
+342	val_342
+367	val_367
+325	val_325
+167	val_167
+195	val_195
+475	val_475
+17	val_17
+113	val_113
+155	val_155
+203	val_203
+339	val_339
+0	val_0
+455	val_455
+128	val_128
+311	val_311
+316	val_316
+57	val_57
+302	val_302
+205	val_205
+149	val_149
+438	val_438
+345	val_345
+129	val_129
+170	val_170
+20	val_20
+489	val_489
+157	val_157
+378	val_378
+221	val_221
+92	val_92
+111	val_111
+47	val_47
+72	val_72
+4	val_4
+280	val_280
+35	val_35
+427	val_427
+277	val_277
+208	val_208
+356	val_356
+399	val_399
+169	val_169
+382	val_382
+498	val_498
+125	val_125
+386	val_386
+437	val_437
+469	val_469
+192	val_192
+286	val_286
+187	val_187
+176	val_176
+54	val_54
+459	val_459
+51	val_51
+138	val_138
+103	val_103
+239	val_239
+213	val_213
+216	val_216
+430	val_430
+278	val_278
+176	val_176
+289	val_289
+221	val_221
+65	val_65
+318	val_318
+332	val_332
+311	val_311
+275	val_275
+137	val_137
+241	val_241
+83	val_83
+333	val_333
+180	val_180
+284	val_284
+12	val_12
+230	val_230
+181	val_181
+67	val_67
+260	val_260
+404	val_404
+384	val_384
+489	val_489
+353	val_353
+373	val_373
+272	val_272
+138	val_138
+217	val_217
+84	val_84
+348	val_348
+466	val_466
+58	val_58
+8	val_8
+411	val_411
+230	val_230
+208	val_208
+348	val_348
+24	val_24
+463	val_463
+431	val_431
+179	val_179
+172	val_172
+42	val_42
+129	val_129
+158	val_158
+119	val_119
+496	val_496
+0	val_0
+322	val_322
+197	val_197
+468	val_468
+393	val_393
+454	val_454
+100	val_100
+298	val_298
+199	val_199
+191	val_191
+418	val_418
+96	val_96
+26	val_26
+165	val_165
+327	val_327
+230	val_230
+205	val_205
+120	val_120
+131	val_131
+51	val_51
+404	val_404
+43	val_43
+436	val_436
+156	val_156
+469	val_469
+468	val_468
+308	val_308
+95	val_95
+196	val_196
+288	val_288
+481	val_481
+457	val_457
+98	val_98
+282	val_282
+197	val_197
+187	val_187
+318	val_318
+318	val_318
+409	val_409
+470	val_470
+137	val_137
+369	val_369
+316	val_316
+169	val_169
+413	val_413
+85	val_85
+77	val_77
+0	val_0
+490	val_490
+87	val_87
+364	val_364
+179	val_179
+118	val_118
+134	val_134
+395	val_395
+282	val_282
+138	val_138
+238	val_238
+419	val_419
+15	val_15
+118	val_118
+72	val_72
+90	val_90
+307	val_307
+19	val_19
+435	val_435
+10	val_10
+277	val_277
+273	val_273
+306	val_306
+224	val_224
+309	val_309
+389	val_389
+327	val_327
+242	val_242
+369	val_369
+392	val_392
+272	val_272
+331	val_331
+401	val_401
+242	val_242
+452	val_452
+177	val_177
+226	val_226
+5	val_5
+497	val_497
+402	val_402
+396	val_396
+317	val_317
+395	val_395
+58	val_58
+35	val_35
+336	val_336
+95	val_95
+11	val_11
+168	val_168
+34	val_34
+229	val_229
+233	val_233
+143	val_143
+472	val_472
+322	val_322
+498	val_498
+160	val_160
+195	val_195
+42	val_42
+321	val_321
+430	val_430
+119	val_119
+489	val_489
+458	val_458
+78	val_78
+76	val_76
+41	val_41
+223	val_223
+492	val_492
+149	val_149
+449	val_449
+218	val_218
+228	val_228
+138	val_138
+453	val_453
+30	val_30
+209	val_209
+64	val_64
+468	val_468
+76	val_76
+74	val_74
+342	val_342
+69	val_69
+230	val_230
+33	val_33
+368	val_368
+103	val_103
+296	val_296
+113	val_113
+216	val_216
+367	val_367
+344	val_344
+167	val_167
+274	val_274
+219	val_219
+239	val_239
+485	val_485
+116	val_116
+223	val_223
+256	val_256
+263	val_263
+70	val_70
+487	val_487
+480	val_480
+401	val_401
+288	val_288
+191	val_191
+5	val_5
+244	val_244
+438	val_438
+128	val_128
+467	val_467
+432	val_432
+202	val_202
+316	val_316
+229	val_229
+469	val_469
+463	val_463
+280	val_280
+2	val_2
+35	val_35
+283	val_283
+331	val_331
+235	val_235
+80	val_80
+44	val_44
+193	val_193
+321	val_321
+335	val_335
+104	val_104
+466	val_466
+366	val_366
+175	val_175
+403	val_403
+483	val_483
+53	val_53
+105	val_105
+257	val_257
+406	val_406
+409	val_409
+190	val_190
+406	val_406
+401	val_401
+114	val_114
+258	val_258
+90	val_90
+203	val_203
+262	val_262
+348	val_348
+424	val_424
+12	val_12
+396	val_396
+201	val_201
+217	val_217
+164	val_164
+431	val_431
+454	val_454
+478	val_478
+298	val_298
+125	val_125
+431	val_431
+164	val_164
+424	val_424
+187	val_187
+382	val_382
+5	val_5
+70	val_70
+397	val_397
+480	val_480
+291	val_291
+24	val_24
+351	val_351
+255	val_255
+104	val_104
+70	val_70
+163	val_163
+438	val_438
+119	val_119
+414	val_414
+200	val_200
+491	val_491
+237	val_237
+439	val_439
+360	val_360
+248	val_248
+479	val_479
+305	val_305
+417	val_417
+199	val_199
+444	val_444
+120	val_120
+429	val_429
+169	val_169
+443	val_443
+323	val_323
+325	val_325
+277	val_277
+230	val_230
+478	val_478
+178	val_178
+468	val_468
+310	val_310
+317	val_317
+333	val_333
+493	val_493
+460	val_460
+207	val_207
+249	val_249
+265	val_265
+480	val_480
+83	val_83
+136	val_136
+353	val_353
+172	val_172
+214	val_214
+462	val_462
+233	val_233
+406	val_406
+133	val_133
+175	val_175
+189	val_189
+454	val_454
+375	val_375
+401	val_401
+421	val_421
+407	val_407
+384	val_384
+256	val_256
+26	val_26
+134	val_134
+67	val_67
+384	val_384
+379	val_379
+18	val_18
+462	val_462
+492	val_492
+100	val_100
+298	val_298
+9	val_9
+341	val_341
+498	val_498
+146	val_146
+458	val_458
+362	val_362
+186	val_186
+285	val_285
+348	val_348
+167	val_167
+18	val_18
+273	val_273
+183	val_183
+281	val_281
+344	val_344
+97	val_97
+469	val_469
+315	val_315
+84	val_84
+28	val_28
+37	val_37
+448	val_448
+152	val_152
+348	val_348
+307	val_307
+194	val_194
+414	val_414
+477	val_477
+222	val_222
+126	val_126
+90	val_90
+169	val_169
+403	val_403
+400	val_400
+200	val_200
+97	val_97
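
For reference, the abstract syntax tree at the top of the new input.q.out corresponds to a query of roughly this shape (reconstructed from the AST; the literal .q file may differ):

    SELECT x.* FROM SRC x

With no filter, aggregation, or destination table, no MapReduce stage is needed: Stage-0 is a plain Fetch Operator with limit -1, i.e. an unbounded scan of src.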

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input1.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input1.q.out?rev=706704&r1=706703&r2=706704&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input1.q.out (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input1.q.out Tue Oct 21 11:11:05 2008
@@ -1,5 +1,5 @@
 ABSTRACT SYNTAX TREE:
-  (TOK_DESCTABLE TEST1)
+  (TOK_DESCTABLE (TOK_TAB TEST1))
 
 STAGE DEPENDENCIES:
   Stage-0 is a root stage
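
The AST change here (and the analogous hunk in input10.q.out below) reflects the parser now wrapping the table name in a TOK_TAB node; the statement itself is unchanged and is presumably of the form:

    DESCRIBE TEST1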

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input10.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input10.q.out?rev=706704&r1=706703&r2=706704&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input10.q.out (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input10.q.out Tue Oct 21 11:11:05 2008
@@ -1,5 +1,5 @@
 ABSTRACT SYNTAX TREE:
-  (TOK_DESCTABLE TEST10)
+  (TOK_DESCTABLE (TOK_TAB TEST10))
 
 STAGE DEPENDENCIES:
   Stage-0 is a root stage

Modified: hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input11.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input11.q.out?rev=706704&r1=706703&r2=706704&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input11.q.out (original)
+++ hadoop/core/trunk/src/contrib/hive/ql/src/test/results/clientpositive/input11.q.out Tue Oct 21 11:11:05 2008
@@ -22,20 +22,20 @@
                       type: string
                 File Output Operator
                   table:
-                      name: dest1
-                      serde: simple_meta
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                      name: dest1
 
   Stage: Stage-0
     Move Operator
       tables:
+            replace:
             table:
-                name: dest1
-                serde: simple_meta
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-            replace:
+                serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                name: dest1
 
 
 86	val_86
@@ -122,4 +122,3 @@
 37	val_37
 90	val_90
 97	val_97
-