Posted to commits@hive.apache.org by zs...@apache.org on 2009/08/24 20:23:22 UTC

svn commit: r807330 [2/3] - in /hadoop/hive/trunk: ./ common/src/java/org/apache/hadoop/hive/conf/ conf/ contrib/src/java/org/apache/hadoop/hive/contrib/serde2/ contrib/src/test/queries/clientpositive/ contrib/src/test/results/clientpositive/ eclipse-t...

Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/typedbytes/TypedBytesOutput.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/typedbytes/TypedBytesOutput.java?rev=807330&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/typedbytes/TypedBytesOutput.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/typedbytes/TypedBytesOutput.java Mon Aug 24 18:23:19 2009
@@ -0,0 +1,324 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.util.typedbytes;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.Map.Entry;
+
+import org.apache.hadoop.io.WritableUtils;
+import org.apache.hadoop.record.Buffer;
+
+/**
+ * Provides functionality for writing typed bytes.
+ */
+public class TypedBytesOutput {
+
+  private DataOutput out;
+
+  private TypedBytesOutput() {}
+
+  private void setDataOutput(DataOutput out) {
+    this.out = out;
+  }
+
+  private static ThreadLocal tbOut = new ThreadLocal() {
+    protected synchronized Object initialValue() {
+      return new TypedBytesOutput();
+    }
+  };
+
+  /**
+   * Get a thread-local typed bytes output for the supplied {@link DataOutput}.
+   * 
+   * @param out data output object
+   * @return typed bytes output corresponding to the supplied 
+   * {@link DataOutput}.
+   */
+  public static TypedBytesOutput get(DataOutput out) {
+    TypedBytesOutput bout = (TypedBytesOutput) tbOut.get();
+    bout.setDataOutput(out);
+    return bout;
+  }
+
+  /** Creates a new instance of TypedBytesOutput. */
+  public TypedBytesOutput(DataOutput out) {
+    this.out = out;
+  }
+  
+  /**
+   * Writes a Java object as a typed bytes sequence.
+   * 
+   * @param obj the object to be written
+   * @throws IOException
+   */
+  public void write(Object obj) throws IOException {
+    if (obj instanceof Buffer) {
+      writeBytes(((Buffer) obj).get());
+    } else if (obj instanceof Byte) {
+      writeByte((Byte) obj);
+    } else if (obj instanceof Boolean) {
+      writeBool((Boolean) obj);
+    } else if (obj instanceof Integer) {
+      writeInt((Integer) obj);
+    } else if (obj instanceof Long) {
+      writeLong((Long) obj);
+    } else if (obj instanceof Float) {
+      writeFloat((Float) obj);
+    } else if (obj instanceof Double) {
+      writeDouble((Double) obj);
+    } else if (obj instanceof String) {
+      writeString((String) obj);
+    } else if (obj instanceof ArrayList) {
+      writeVector((ArrayList) obj);
+    } else if (obj instanceof List) {
+      writeList((List) obj);
+    } else if (obj instanceof Map) {
+      writeMap((Map) obj);
+    } else {
+      throw new RuntimeException("cannot write objects of this type");
+    }
+  }
+
+  /**
+   * Writes a raw sequence of typed bytes.
+   * 
+   * @param bytes the bytes to be written
+   * @throws IOException
+   */
+  public void writeRaw(byte[] bytes) throws IOException {
+    out.write(bytes);
+  }
+
+  /**
+   * Writes a raw sequence of typed bytes.
+   * 
+   * @param bytes the bytes to be written
+   * @param offset an offset in the given array
+   * @param length number of bytes from the given array to write
+   * @throws IOException
+   */
+  public void writeRaw(byte[] bytes, int offset, int length)
+    throws IOException {
+    out.write(bytes, offset, length);
+  }
+
+  /**
+   * Writes a byte array as a typed bytes sequence, using a given typecode.
+   * 
+   * @param bytes the byte array to be written
+   * @param code the typecode to use
+   * @throws IOException
+   */
+  public void writeBytes(byte[] bytes, int code) throws IOException {
+    out.write(code);
+    out.writeInt(bytes.length);
+    out.write(bytes);
+  }
+  
+  /**
+   * Writes a byte array as a typed bytes sequence.
+   * 
+   * @param bytes the byte array to be written
+   * @throws IOException
+   */
+  public void writeBytes(byte[] bytes) throws IOException {
+    writeBytes(bytes, Type.BYTES.code);
+  }
+
+  /**
+   * Writes a byte as a typed bytes sequence.
+   * 
+   * @param b the byte to be written
+   * @throws IOException
+   */
+  public void writeByte(byte b) throws IOException {
+    out.write(Type.BYTE.code);
+    out.write(b);
+  }
+
+  /**
+   * Writes a boolean as a typed bytes sequence.
+   * 
+   * @param b the boolean to be written
+   * @throws IOException
+   */
+  public void writeBool(boolean b) throws IOException {
+    out.write(Type.BOOL.code);
+    out.writeBoolean(b);
+  }
+
+  /**
+   * Writes an integer as a typed bytes sequence.
+   * 
+   * @param i the integer to be written
+   * @throws IOException
+   */
+  public void writeInt(int i) throws IOException {
+    out.write(Type.INT.code);
+    out.writeInt(i);
+  }
+
+  /**
+   * Writes a long as a typed bytes sequence.
+   * 
+   * @param l the long to be written
+   * @throws IOException
+   */
+  public void writeLong(long l) throws IOException {
+    out.write(Type.LONG.code);
+    out.writeLong(l);
+  }
+
+  /**
+   * Writes a float as a typed bytes sequence.
+   * 
+   * @param f the float to be written
+   * @throws IOException
+   */
+  public void writeFloat(float f) throws IOException {
+    out.write(Type.FLOAT.code);
+    out.writeFloat(f);
+  }
+
+  /**
+   * Writes a double as a typed bytes sequence.
+   * 
+   * @param d the double to be written
+   * @throws IOException
+   */
+  public void writeDouble(double d) throws IOException {
+    out.write(Type.DOUBLE.code);
+    out.writeDouble(d);
+  }
+  
+  /**
+   * Writes a short as a typed bytes sequence.
+   * 
+   * @param s the short to be written
+   * @throws IOException
+   */
+  public void writeShort(short s) throws IOException {
+    out.write(Type.SHORT.code);
+    out.writeShort(s);
+  }
+
+  /**
+   * Writes a string as a typed bytes sequence.
+   * 
+   * @param s the string to be written
+   * @throws IOException
+   */
+  public void writeString(String s) throws IOException {
+    out.write(Type.STRING.code);
+    WritableUtils.writeString(out, s);
+  }
+
+  /**
+   * Writes a vector as a typed bytes sequence.
+   * 
+   * @param vector the vector to be written
+   * @throws IOException
+   */
+  public void writeVector(ArrayList vector) throws IOException {
+    writeVectorHeader(vector.size());
+    for (Object obj : vector) {
+      write(obj);
+    }
+  }
+
+  /**
+   * Writes a vector header.
+   * 
+   * @param length the number of elements in the vector
+   * @throws IOException
+   */
+  public void writeVectorHeader(int length) throws IOException {
+    out.write(Type.VECTOR.code);
+    out.writeInt(length);
+  }
+
+  /**
+   * Writes a list as a typed bytes sequence.
+   * 
+   * @param list the list to be written
+   * @throws IOException
+   */
+  public void writeList(List list) throws IOException {
+    writeListHeader();
+    for (Object obj : list) {
+      write(obj);
+    }
+    writeListFooter();
+  }
+
+  /**
+   * Writes a list header.
+   * 
+   * @throws IOException
+   */
+  public void writeListHeader() throws IOException {
+    out.write(Type.LIST.code);
+  }
+
+  /**
+   * Writes a list footer.
+   * 
+   * @throws IOException
+   */
+  public void writeListFooter() throws IOException {
+    out.write(Type.MARKER.code);
+  }
+
+  /**
+   * Writes a map as a typed bytes sequence.
+   * 
+   * @param map the map to be written
+   * @throws IOException
+   */
+  @SuppressWarnings("unchecked")
+  public void writeMap(Map map) throws IOException {
+    writeMapHeader(map.size());
+    Set<Entry> entries = map.entrySet();
+    for (Entry entry : entries) {
+      write(entry.getKey());
+      write(entry.getValue());
+    }
+  }
+
+  /**
+   * Writes a map header.
+   * 
+   * @param length the number of key-value pairs in the map
+   * @throws IOException
+   */
+  public void writeMapHeader(int length) throws IOException {
+    out.write(Type.MAP.code);
+    out.writeInt(length);
+  }
+  
+  public void writeEndOfRecord() throws IOException {
+    out.write(Type.ENDOFRECORD.code);
+  }
+
+}
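
A minimal usage sketch for the class above (illustrative only, not part of
this commit): TypedBytesOutput prefixes each value with a one-byte typecode
and then its standard binary encoding.

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.hive.ql.util.typedbytes.TypedBytesOutput;

public class TypedBytesOutputExample {
  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    TypedBytesOutput out = TypedBytesOutput.get(new DataOutputStream(baos));
    out.writeInt(42);         // INT typecode + 4 big-endian bytes
    out.writeString("hello"); // STRING typecode + length-prefixed UTF-8
    out.writeBool(true);      // BOOL typecode + 1 byte
    System.out.println("encoded " + baos.size() + " bytes");
  }
}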

Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/typedbytes/TypedBytesRecordInput.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/typedbytes/TypedBytesRecordInput.java?rev=807330&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/typedbytes/TypedBytesRecordInput.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/typedbytes/TypedBytesRecordInput.java Mon Aug 24 18:23:19 2009
@@ -0,0 +1,159 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.util.typedbytes;
+
+import java.io.DataInput;
+import java.io.IOException;
+
+import org.apache.hadoop.record.Buffer;
+import org.apache.hadoop.record.Index;
+import org.apache.hadoop.record.RecordInput;
+
+/**
+ * Deserializer for records that reads typed bytes.
+ */
+public class TypedBytesRecordInput implements RecordInput {
+
+  private TypedBytesInput in;
+
+  private TypedBytesRecordInput() {}
+
+  private void setTypedBytesInput(TypedBytesInput in) {
+    this.in = in;
+  }
+
+  private static ThreadLocal tbIn = new ThreadLocal() {
+    protected synchronized Object initialValue() {
+      return new TypedBytesRecordInput();
+    }
+  };
+
+  /**
+   * Get a thread-local typed bytes record input for the supplied
+   * {@link TypedBytesInput}.
+   * 
+   * @param in typed bytes input object
+   * @return typed bytes record input corresponding to the supplied
+   *         {@link TypedBytesInput}.
+   */
+  public static TypedBytesRecordInput get(TypedBytesInput in) {
+    TypedBytesRecordInput bin = (TypedBytesRecordInput) tbIn.get();
+    bin.setTypedBytesInput(in);
+    return bin;
+  }
+
+  /**
+   * Get a thread-local typed bytes record input for the supplied
+   * {@link DataInput}.
+   * 
+   * @param in data input object
+   * @return typed bytes record input corresponding to the supplied
+   *         {@link DataInput}.
+   */
+  public static TypedBytesRecordInput get(DataInput in) {
+    return get(TypedBytesInput.get(in));
+  }
+
+  /** Creates a new instance of TypedBytesRecordInput. */
+  public TypedBytesRecordInput(TypedBytesInput in) {
+    this.in = in;
+  }
+
+  /** Creates a new instance of TypedBytesRecordInput. */
+  public TypedBytesRecordInput(DataInput in) {
+    this(new TypedBytesInput(in));
+  }
+
+  public boolean readBool(String tag) throws IOException {
+    in.skipType();
+    return in.readBool();
+  }
+
+  public Buffer readBuffer(String tag) throws IOException {
+    in.skipType();
+    return new Buffer(in.readBytes());
+  }
+
+  public byte readByte(String tag) throws IOException {
+    in.skipType();
+    return in.readByte();
+  }
+
+  public double readDouble(String tag) throws IOException {
+    in.skipType();
+    return in.readDouble();
+  }
+
+  public float readFloat(String tag) throws IOException {
+    in.skipType();
+    return in.readFloat();
+  }
+
+  public int readInt(String tag) throws IOException {
+    in.skipType();
+    return in.readInt();
+  }
+
+  public long readLong(String tag) throws IOException {
+    in.skipType();
+    return in.readLong();
+  }
+
+  public String readString(String tag) throws IOException {
+    in.skipType();
+    return in.readString();
+  }
+
+  public void startRecord(String tag) throws IOException {
+    in.skipType();
+  }
+
+  public Index startVector(String tag) throws IOException {
+    in.skipType();
+    return new TypedBytesIndex(in.readVectorHeader());
+  }
+
+  public Index startMap(String tag) throws IOException {
+    in.skipType();
+    return new TypedBytesIndex(in.readMapHeader());
+  }
+
+  public void endRecord(String tag) throws IOException {}
+
+  public void endVector(String tag) throws IOException {}
+
+  public void endMap(String tag) throws IOException {}
+
+  private static final class TypedBytesIndex implements Index {
+    private int nelems;
+
+    private TypedBytesIndex(int nelems) {
+      this.nelems = nelems;
+    }
+
+    public boolean done() {
+      return (nelems <= 0);
+    }
+
+    public void incr() {
+      nelems--;
+    }
+  }
+
+}
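
A minimal round-trip sketch (illustrative only, not part of this commit):
because each readX() above first skips the typecode, values written with
TypedBytesOutput can be read back through the RecordInput interface. The tag
arguments are ignored by this format, and TypedBytesInput is the companion
reader added in another part of this commit.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.hive.ql.util.typedbytes.TypedBytesOutput;
import org.apache.hadoop.hive.ql.util.typedbytes.TypedBytesRecordInput;

public class TypedBytesRecordInputExample {
  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    TypedBytesOutput out = new TypedBytesOutput(new DataOutputStream(baos));
    out.writeInt(7);
    out.writeString("seven");

    DataInputStream dis =
      new DataInputStream(new ByteArrayInputStream(baos.toByteArray()));
    TypedBytesRecordInput in = TypedBytesRecordInput.get(dis);
    int i = in.readInt("unused-tag");       // skips INT typecode, reads 7
    String s = in.readString("unused-tag"); // skips STRING typecode
    System.out.println(i + " " + s);        // prints: 7 seven
  }
}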

Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/typedbytes/TypedBytesRecordOutput.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/typedbytes/TypedBytesRecordOutput.java?rev=807330&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/typedbytes/TypedBytesRecordOutput.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/typedbytes/TypedBytesRecordOutput.java Mon Aug 24 18:23:19 2009
@@ -0,0 +1,137 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.util.typedbytes;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.TreeMap;
+
+import org.apache.hadoop.record.Buffer;
+import org.apache.hadoop.record.Record;
+import org.apache.hadoop.record.RecordOutput;
+
+/**
+ * Serializer for records that writes typed bytes.
+ */
+public class TypedBytesRecordOutput implements RecordOutput {
+
+  private TypedBytesOutput out;
+
+  private TypedBytesRecordOutput() {}
+
+  private void setTypedBytesOutput(TypedBytesOutput out) {
+    this.out = out;
+  }
+
+  private static ThreadLocal tbOut = new ThreadLocal() {
+    protected synchronized Object initialValue() {
+      return new TypedBytesRecordOutput();
+    }
+  };
+
+  /**
+   * Get a thread-local typed bytes record output for the supplied
+   * {@link TypedBytesOutput}.
+   * 
+   * @param out typed bytes output object
+   * @return typed bytes record output corresponding to the supplied
+   *         {@link TypedBytesOutput}.
+   */
+  public static TypedBytesRecordOutput get(TypedBytesOutput out) {
+    TypedBytesRecordOutput bout = (TypedBytesRecordOutput) tbOut.get();
+    bout.setTypedBytesOutput(out);
+    return bout;
+  }
+
+  /**
+   * Get a thread-local typed bytes record output for the supplied
+   * {@link DataOutput}.
+   * 
+   * @param out data output object
+   * @return typed bytes record output corresponding to the supplied
+   *         {@link DataOutput}.
+   */
+  public static TypedBytesRecordOutput get(DataOutput out) {
+    return get(TypedBytesOutput.get(out));
+  }
+
+  /** Creates a new instance of TypedBytesRecordOutput. */
+  public TypedBytesRecordOutput(TypedBytesOutput out) {
+    this.out = out;
+  }
+
+  /** Creates a new instance of TypedBytesRecordOutput. */
+  public TypedBytesRecordOutput(DataOutput out) {
+    this(new TypedBytesOutput(out));
+  }
+
+  public void writeBool(boolean b, String tag) throws IOException {
+    out.writeBool(b);
+  }
+
+  public void writeBuffer(Buffer buf, String tag) throws IOException {
+    out.writeBytes(buf.get());
+  }
+
+  public void writeByte(byte b, String tag) throws IOException {
+    out.writeByte(b);
+  }
+
+  public void writeDouble(double d, String tag) throws IOException {
+    out.writeDouble(d);
+  }
+
+  public void writeFloat(float f, String tag) throws IOException {
+    out.writeFloat(f);
+  }
+
+  public void writeInt(int i, String tag) throws IOException {
+    out.writeInt(i);
+  }
+
+  public void writeLong(long l, String tag) throws IOException {
+    out.writeLong(l);
+  }
+
+  public void writeString(String s, String tag) throws IOException {
+    out.writeString(s);
+  }
+
+  public void startRecord(Record r, String tag) throws IOException {
+    out.writeListHeader();
+  }
+
+  public void startVector(ArrayList v, String tag) throws IOException {
+    out.writeVectorHeader(v.size());
+  }
+
+  public void startMap(TreeMap m, String tag) throws IOException {
+    out.writeMapHeader(m.size());
+  }
+
+  public void endRecord(Record r, String tag) throws IOException {
+    out.writeListFooter();
+  }
+
+  public void endVector(ArrayList v, String tag) throws IOException {}
+
+  public void endMap(TreeMap m, String tag) throws IOException {}
+
+}
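
A minimal sketch of driving TypedBytesRecordOutput directly (illustrative
only, not part of this commit). Records are encoded as typed-bytes lists:
startRecord emits a list header and endRecord the list-footer marker; the
Record and tag arguments are ignored, so null is passed here.

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.hive.ql.util.typedbytes.TypedBytesRecordOutput;

public class TypedBytesRecordOutputExample {
  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    TypedBytesRecordOutput rout =
      TypedBytesRecordOutput.get(new DataOutputStream(baos));
    rout.startRecord(null, "rec"); // LIST typecode (Record argument unused)
    rout.writeInt(1, "f1");
    rout.writeString("one", "f2");
    rout.endRecord(null, "rec");   // MARKER typecode closes the list
    System.out.println("record encoded in " + baos.size() + " bytes");
  }
}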

Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/typedbytes/TypedBytesWritable.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/typedbytes/TypedBytesWritable.java?rev=807330&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/typedbytes/TypedBytesWritable.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/typedbytes/TypedBytesWritable.java Mon Aug 24 18:23:19 2009
@@ -0,0 +1,88 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.util.typedbytes;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+
+import org.apache.hadoop.io.BytesWritable;
+
+/**
+ * Writable for typed bytes.
+ */
+public class TypedBytesWritable extends BytesWritable {
+
+  /** Create a TypedBytesWritable. */
+  public TypedBytesWritable() {
+    super();
+  }
+
+  /** Create a TypedBytesWritable with a given byte array as initial value. */
+  public TypedBytesWritable(byte[] bytes) {
+    super(bytes);
+  }
+
+  /** Set the typed bytes from a given Java object. */
+  public void setValue(Object obj) {
+    try {
+      ByteArrayOutputStream baos = new ByteArrayOutputStream();
+      TypedBytesOutput tbo = TypedBytesOutput.get(new DataOutputStream(baos));
+      tbo.write(obj);
+      byte[] bytes = baos.toByteArray();
+      set(bytes, 0, bytes.length);
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  /** Get the typed bytes as a Java object. */
+  public Object getValue() {
+    try {
+      ByteArrayInputStream bais = new ByteArrayInputStream(get());
+      TypedBytesInput tbi = TypedBytesInput.get(new DataInputStream(bais));
+      Object obj = tbi.read();
+      return obj;
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  /** Get the type code embedded in the first byte. */
+  public Type getType() {
+    byte[] bytes = get();
+    if (bytes == null || bytes.length == 0) {
+      return null;
+    }
+    for (Type type : Type.values()) {
+      if (type.code == (int) bytes[0]) {
+        return type;
+      }
+    }
+    return null;
+  }
+
+  /** Generate a suitable string representation. */
+  public String toString() {
+    return getValue().toString();
+  }
+
+}
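
A minimal sketch (illustrative only, not part of this commit):
TypedBytesWritable wraps any supported Java object as typed bytes and
decodes it on demand.

import org.apache.hadoop.hive.ql.util.typedbytes.TypedBytesWritable;

public class TypedBytesWritableExample {
  public static void main(String[] args) {
    TypedBytesWritable w = new TypedBytesWritable();
    w.setValue("hello typed bytes");  // encoded as a STRING typed bytes sequence
    System.out.println(w.getType());  // STRING, recovered from the first byte
    System.out.println(w.getValue()); // hello typed bytes
  }
}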

Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/typedbytes/TypedBytesWritableInput.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/typedbytes/TypedBytesWritableInput.java?rev=807330&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/typedbytes/TypedBytesWritableInput.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/typedbytes/TypedBytesWritableInput.java Mon Aug 24 18:23:19 2009
@@ -0,0 +1,406 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.util.typedbytes;
+
+import java.io.ByteArrayInputStream;
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configurable;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.serde2.io.ByteWritable;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.io.ArrayWritable;
+import org.apache.hadoop.io.BooleanWritable;
+
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.io.SortedMapWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.VIntWritable;
+import org.apache.hadoop.io.VLongWritable;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.io.WritableUtils;
+import org.apache.hadoop.util.ReflectionUtils;
+
+/**
+ * Provides functionality for reading typed bytes as Writable objects.
+ * 
+ * @see TypedBytesInput
+ */
+public class TypedBytesWritableInput implements Configurable {
+
+  private TypedBytesInput in;
+  private Configuration conf;
+
+  private TypedBytesWritableInput() {
+    conf = new Configuration();
+  }
+
+  private void setTypedBytesInput(TypedBytesInput in) {
+    this.in = in;
+  }
+
+  private static ThreadLocal tbIn = new ThreadLocal() {
+    protected synchronized Object initialValue() {
+      return new TypedBytesWritableInput();
+    }
+  };
+
+  /**
+   * Get a thread-local typed bytes writable input for the supplied
+   * {@link TypedBytesInput}.
+   * 
+   * @param in typed bytes input object
+   * @return typed bytes writable input corresponding to the supplied
+   *         {@link TypedBytesInput}.
+   */
+  public static TypedBytesWritableInput get(TypedBytesInput in) {
+    TypedBytesWritableInput bin = (TypedBytesWritableInput) tbIn.get();
+    bin.setTypedBytesInput(in);
+    return bin;
+  }
+
+  /**
+   * Get a thread-local typed bytes writable input for the supplied
+   * {@link DataInput}.
+   * 
+   * @param in data input object
+   * @return typed bytes writable input corresponding to the supplied
+   *         {@link DataInput}.
+   */
+  public static TypedBytesWritableInput get(DataInput in) {
+    return get(TypedBytesInput.get(in));
+  }
+
+  /** Creates a new instance of TypedBytesWritableInput. */
+  public TypedBytesWritableInput(TypedBytesInput in) {
+    this();
+    this.in = in;
+  }
+
+  /** Creates a new instance of TypedBytesWritableInput. */
+  public TypedBytesWritableInput(DataInput din) {
+    this(new TypedBytesInput(din));
+  }
+
+  public Writable read() throws IOException {
+    Type type = in.readType();
+    if (type == null) {
+      return null;
+    }
+    switch (type) {
+    case BYTES:
+      return readBytes();
+    case BYTE:
+      return readByte();
+    case BOOL:
+      return readBoolean();
+    case INT:
+      return readVInt();
+    case SHORT:
+      return readShort();
+    case LONG:
+      return readVLong();
+    case FLOAT:
+      return readFloat();
+    case DOUBLE:
+      return readDouble();
+    case STRING:
+      return readText();
+    case VECTOR:
+      return readArray();
+    case MAP:
+      return readMap();
+    case WRITABLE:
+      return readWritable();
+    case ENDOFRECORD:
+      return null;
+    default:
+      throw new RuntimeException("unknown type");
+    }
+  }
+
+  public Type readTypeCode() throws IOException {
+    return in.readType();
+  }
+  
+  public Class<? extends Writable> readType() throws IOException {
+    Type type = in.readType();
+    if (type == null) {
+      return null;
+    }
+    switch (type) {
+    case BYTES:
+      return BytesWritable.class;
+    case BYTE:
+      return ByteWritable.class;
+    case BOOL:
+      return BooleanWritable.class;
+    case INT:
+      return VIntWritable.class;
+    case SHORT:
+      return ShortWritable.class;
+    case LONG:
+      return VLongWritable.class;
+    case FLOAT:
+      return FloatWritable.class;
+    case DOUBLE:
+      return DoubleWritable.class;
+    case STRING:
+      return Text.class;
+    case VECTOR:
+      return ArrayWritable.class;
+    case MAP:
+      return MapWritable.class;
+    case WRITABLE:
+      return Writable.class;
+    case ENDOFRECORD:
+      return null;
+    default:
+      throw new RuntimeException("unknown type");
+    }
+  }
+
+  public BytesWritable readBytes(BytesWritable bw) throws IOException {
+    byte[] bytes = in.readBytes();
+    if (bw == null) {
+      bw = new BytesWritable(bytes);
+    } else {
+      bw.set(bytes, 0, bytes.length);
+    }
+    return bw;
+  }
+
+  public BytesWritable readBytes() throws IOException {
+    return readBytes(null);
+  }
+
+  public ByteWritable readByte(ByteWritable bw) throws IOException {
+    if (bw == null) {
+      bw = new ByteWritable();
+    }
+    bw.set(in.readByte());
+    return bw;
+  }
+
+  public ByteWritable readByte() throws IOException {
+    return readByte(null);
+  }
+
+  public BooleanWritable readBoolean(BooleanWritable bw) throws IOException {
+    if (bw == null) {
+      bw = new BooleanWritable();
+    }
+    bw.set(in.readBool());
+    return bw;
+  }
+
+  public BooleanWritable readBoolean() throws IOException {
+    return readBoolean(null);
+  }
+
+  public IntWritable readInt(IntWritable iw) throws IOException {
+    if (iw == null) {
+      iw = new IntWritable();
+    }
+    iw.set(in.readInt());
+    return iw;
+  }
+
+  public IntWritable readInt() throws IOException {
+    return readInt(null);
+  }
+
+  public ShortWritable readShort(ShortWritable sw) throws IOException {
+    if (sw == null) {
+      sw = new ShortWritable();
+    }
+    sw.set(in.readShort());
+    return sw;
+  }
+
+  public ShortWritable readShort() throws IOException {
+    return readShort(null);
+  }
+  
+  public VIntWritable readVInt(VIntWritable iw) throws IOException {
+    if (iw == null) {
+      iw = new VIntWritable();
+    }
+    iw.set(in.readInt());
+    return iw;
+  }
+
+  public VIntWritable readVInt() throws IOException {
+    return readVInt(null);
+  }
+
+  public LongWritable readLong(LongWritable lw) throws IOException {
+    if (lw == null) {
+      lw = new LongWritable();
+    }
+    lw.set(in.readLong());
+    return lw;
+  }
+
+  public LongWritable readLong() throws IOException {
+    return readLong(null);
+  }
+
+  public VLongWritable readVLong(VLongWritable lw) throws IOException {
+    if (lw == null) {
+      lw = new VLongWritable();
+    }
+    lw.set(in.readLong());
+    return lw;
+  }
+
+  public VLongWritable readVLong() throws IOException {
+    return readVLong(null);
+  }
+
+  public FloatWritable readFloat(FloatWritable fw) throws IOException {
+    if (fw == null) {
+      fw = new FloatWritable();
+    }
+    fw.set(in.readFloat());
+    return fw;
+  }
+
+  public FloatWritable readFloat() throws IOException {
+    return readFloat(null);
+  }
+
+  public DoubleWritable readDouble(DoubleWritable dw) throws IOException {
+    if (dw == null) {
+      dw = new DoubleWritable();
+    }
+    dw.set(in.readDouble());
+    return dw;
+  }
+
+  public DoubleWritable readDouble() throws IOException {
+    return readDouble(null);
+  }
+
+  public Text readText(Text t) throws IOException {
+    if (t == null) {
+      t = new Text();
+    }
+    t.set(in.readString());
+    return t;
+  }
+
+  public Text readText() throws IOException {
+    return readText(null);
+  }
+
+  public ArrayWritable readArray(ArrayWritable aw) throws IOException {
+    if (aw == null) {
+      aw = new ArrayWritable(TypedBytesWritable.class);
+    } else if (!aw.getValueClass().equals(TypedBytesWritable.class)) {
+      throw new RuntimeException("value class has to be TypedBytesWritable");
+    }
+    int length = in.readVectorHeader();
+    Writable[] writables = new Writable[length];
+    for (int i = 0; i < length; i++) {
+      writables[i] = new TypedBytesWritable(in.readRaw());
+    }
+    aw.set(writables);
+    return aw;
+  }
+
+  public ArrayWritable readArray() throws IOException {
+    return readArray(null);
+  }
+
+  public MapWritable readMap(MapWritable mw) throws IOException {
+    if (mw == null) {
+      mw = new MapWritable();
+    }
+    int length = in.readMapHeader();
+    for (int i = 0; i < length; i++) {
+      Writable key = read();
+      Writable value = read();
+      mw.put(key, value);
+    }
+    return mw;
+  }
+
+  public MapWritable readMap() throws IOException {
+    return readMap(null);
+  }
+
+  public SortedMapWritable readSortedMap(SortedMapWritable mw)
+    throws IOException {
+    if (mw == null) {
+      mw = new SortedMapWritable();
+    }
+    int length = in.readMapHeader();
+    for (int i = 0; i < length; i++) {
+      WritableComparable key = (WritableComparable) read();
+      Writable value = read();
+      mw.put(key, value);
+    }
+    return mw;
+  }
+
+  public SortedMapWritable readSortedMap() throws IOException {
+    return readSortedMap(null);
+  }
+  
+  public Writable readWritable(Writable writable) throws IOException {
+    ByteArrayInputStream bais = new ByteArrayInputStream(in.readBytes());
+    DataInputStream dis = new DataInputStream(bais);
+    String className = WritableUtils.readString(dis);
+    if (writable == null) {
+      try {
+        Class<? extends Writable> cls = 
+          conf.getClassByName(className).asSubclass(Writable.class);
+        writable = (Writable) ReflectionUtils.newInstance(cls, conf);
+      } catch (ClassNotFoundException e) {
+        throw new IOException(e);
+      }
+    } else if (!writable.getClass().getName().equals(className)) {
+      throw new IOException("wrong Writable class given");
+    }
+    writable.readFields(dis);
+    return writable;
+  }
+
+  public Writable readWritable() throws IOException {
+    return readWritable(null);
+  }
+
+  public Configuration getConf() {
+    return conf;
+  }
+
+  public void setConf(Configuration conf) {
+    this.conf = conf;
+  }
+  
+}
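
A minimal round-trip sketch (illustrative only, not part of this commit):
typed bytes produced by TypedBytesOutput decoded into Writable objects.
Note that read() maps the INT typecode to VIntWritable and LONG to
VLongWritable, as the switch above shows.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.hive.ql.util.typedbytes.TypedBytesOutput;
import org.apache.hadoop.hive.ql.util.typedbytes.TypedBytesWritableInput;
import org.apache.hadoop.io.Writable;

public class TypedBytesWritableInputExample {
  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    TypedBytesOutput out = new TypedBytesOutput(new DataOutputStream(baos));
    out.writeInt(123);
    out.writeString("abc");

    TypedBytesWritableInput in = TypedBytesWritableInput.get(
        new DataInputStream(new ByteArrayInputStream(baos.toByteArray())));
    Writable first = in.read();   // VIntWritable holding 123
    Writable second = in.read();  // Text holding "abc"
    System.out.println(first + " " + second);
  }
}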

Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/typedbytes/TypedBytesWritableOutput.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/typedbytes/TypedBytesWritableOutput.java?rev=807330&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/typedbytes/TypedBytesWritableOutput.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/typedbytes/TypedBytesWritableOutput.java Mon Aug 24 18:23:19 2009
@@ -0,0 +1,232 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.util.typedbytes;
+
+import java.io.ByteArrayOutputStream;
+import java.io.DataOutput;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Map;
+
+import org.apache.hadoop.hive.serde2.io.ByteWritable;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.io.ArrayWritable;
+import org.apache.hadoop.io.BooleanWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.io.SortedMapWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.VIntWritable;
+import org.apache.hadoop.io.VLongWritable;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.io.WritableUtils;
+import org.apache.hadoop.record.Record;
+
+/**
+ * Provides functionality for writing Writable objects as typed bytes.
+ * 
+ * @see TypedBytesOutput
+ */
+public class TypedBytesWritableOutput {
+
+  private TypedBytesOutput out;
+
+  private TypedBytesWritableOutput() {}
+
+  private void setTypedBytesOutput(TypedBytesOutput out) {
+    this.out = out;
+  }
+
+  private static ThreadLocal tbOut = new ThreadLocal() {
+    protected synchronized Object initialValue() {
+      return new TypedBytesWritableOutput();
+    }
+  };
+
+  /**
+   * Get a thread-local typed bytes writable output for the supplied
+   * {@link TypedBytesOutput}.
+   * 
+   * @param out typed bytes output object
+   * @return typed bytes writable output corresponding to the supplied
+   *         {@link TypedBytesOutput}.
+   */
+  public static TypedBytesWritableOutput get(TypedBytesOutput out) {
+    TypedBytesWritableOutput bout = (TypedBytesWritableOutput) tbOut.get();
+    bout.setTypedBytesOutput(out);
+    return bout;
+  }
+
+  /**
+   * Get a thread-local typed bytes writable output for the supplied
+   * {@link DataOutput}.
+   * 
+   * @param out data output object
+   * @return typed bytes writable output corresponding to the supplied
+   *         {@link DataOutput}.
+   */
+  public static TypedBytesWritableOutput get(DataOutput out) {
+    return get(TypedBytesOutput.get(out));
+  }
+
+  /** Creates a new instance of TypedBytesWritableOutput. */
+  public TypedBytesWritableOutput(TypedBytesOutput out) {
+    this();
+    this.out = out;
+  }
+
+  /** Creates a new instance of TypedBytesWritableOutput. */
+  public TypedBytesWritableOutput(DataOutput dout) {
+    this(new TypedBytesOutput(dout));
+  }
+
+  public void write(Writable w) throws IOException {
+    if (w instanceof TypedBytesWritable) {
+      writeTypedBytes((TypedBytesWritable) w);
+    } else if (w instanceof BytesWritable) {
+      writeBytes((BytesWritable) w);
+    } else if (w instanceof ByteWritable) {
+      writeByte((ByteWritable) w);
+    } else if (w instanceof BooleanWritable) {
+      writeBoolean((BooleanWritable) w);
+    } else if (w instanceof IntWritable) {
+      writeInt((IntWritable) w);
+    } else if (w instanceof VIntWritable) {
+      writeVInt((VIntWritable) w);
+    } else if (w instanceof LongWritable) {
+      writeLong((LongWritable) w);
+    } else if (w instanceof VLongWritable) {
+      writeVLong((VLongWritable) w);
+    } else if (w instanceof FloatWritable) {
+      writeFloat((FloatWritable) w);
+    } else if (w instanceof DoubleWritable) {
+      writeDouble((DoubleWritable) w);
+    } else if (w instanceof Text) {
+      writeText((Text) w);
+    } else if (w instanceof ShortWritable) {
+      writeShort((ShortWritable) w);
+    } else if (w instanceof ArrayWritable) {
+      writeArray((ArrayWritable) w);
+    } else if (w instanceof MapWritable) {
+      writeMap((MapWritable) w);
+    } else if (w instanceof SortedMapWritable) {
+      writeSortedMap((SortedMapWritable) w);
+    } else if (w instanceof Record) {
+      writeRecord((Record) w);
+    } else {
+      writeWritable(w); // last resort
+    }
+  }
+
+  public void writeTypedBytes(TypedBytesWritable tbw) throws IOException {
+    out.writeRaw(tbw.get(), 0, tbw.getSize());
+  }
+
+  public void writeBytes(BytesWritable bw) throws IOException {
+    byte[] bytes = Arrays.copyOfRange(bw.get(), 0, bw.getSize());
+    out.writeBytes(bytes);
+  }
+
+  public void writeByte(ByteWritable bw) throws IOException {
+    out.writeByte(bw.get());
+  }
+
+  public void writeBoolean(BooleanWritable bw) throws IOException {
+    out.writeBool(bw.get());
+  }
+
+  public void writeInt(IntWritable iw) throws IOException {
+    out.writeInt(iw.get());
+  }
+
+  public void writeVInt(VIntWritable viw) throws IOException {
+    out.writeInt(viw.get());
+  }
+
+  public void writeLong(LongWritable lw) throws IOException {
+    out.writeLong(lw.get());
+  }
+
+  public void writeVLong(VLongWritable vlw) throws IOException {
+    out.writeLong(vlw.get());
+  }
+
+  public void writeFloat(FloatWritable fw) throws IOException {
+    out.writeFloat(fw.get());
+  }
+
+  public void writeDouble(DoubleWritable dw) throws IOException {
+    out.writeDouble(dw.get());
+  }
+
+  public void writeShort(ShortWritable sw) throws IOException {
+    out.writeShort(sw.get());
+  }
+
+  public void writeText(Text t) throws IOException {
+    out.writeString(t.toString());
+  }
+
+  public void writeArray(ArrayWritable aw) throws IOException {
+    Writable[] writables = aw.get();
+    out.writeVectorHeader(writables.length);
+    for (Writable writable : writables) {
+      write(writable);
+    }
+  }
+
+  public void writeMap(MapWritable mw) throws IOException {
+    out.writeMapHeader(mw.size());
+    for (Map.Entry<Writable, Writable> entry : mw.entrySet()) {
+      write(entry.getKey());
+      write(entry.getValue());
+    }
+  }
+
+  public void writeSortedMap(SortedMapWritable smw) throws IOException {
+    out.writeMapHeader(smw.size());
+    for (Map.Entry<WritableComparable, Writable> entry : smw.entrySet()) {
+      write(entry.getKey());
+      write(entry.getValue());
+    }
+  }
+
+  public void writeRecord(Record r) throws IOException {
+    r.serialize(TypedBytesRecordOutput.get(out));
+  }
+
+  public void writeWritable(Writable w) throws IOException {
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    DataOutputStream dos = new DataOutputStream(baos);
+    WritableUtils.writeString(dos, w.getClass().getName());
+    w.write(dos);
+    dos.close();
+    out.writeBytes(baos.toByteArray(), Type.WRITABLE.code);
+  }
+
+  public void writeEndOfRecord() throws IOException {
+    out.writeEndOfRecord();
+  }
+}
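
A minimal sketch (illustrative only, not part of this commit): encoding
Writable objects as typed bytes with TypedBytesWritableOutput and decoding
them again with the TypedBytesWritableInput class shown earlier.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.hive.ql.util.typedbytes.TypedBytesWritableInput;
import org.apache.hadoop.hive.ql.util.typedbytes.TypedBytesWritableOutput;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;

public class TypedBytesWritableOutputExample {
  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    TypedBytesWritableOutput out =
      TypedBytesWritableOutput.get(new DataOutputStream(baos));
    out.write(new IntWritable(99)); // dispatches to writeInt
    out.write(new Text("payload")); // dispatches to writeText

    TypedBytesWritableInput in = TypedBytesWritableInput.get(
        new DataInputStream(new ByteArrayInputStream(baos.toByteArray())));
    System.out.println(in.read()); // 99 (as a VIntWritable)
    System.out.println(in.read()); // payload
  }
}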

Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java?rev=807330&r1=807329&r2=807330&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java Mon Aug 24 18:23:19 2009
@@ -205,7 +205,8 @@
     Operator<scriptDesc> op2 = OperatorFactory.get
       (new scriptDesc("/bin/cat",
           PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key,value"),
-          PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key,value")),
+          PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key,value"), 
+          TextRecordReader.class),
        op3);
 
 
@@ -344,8 +345,9 @@
 
     Operator<scriptDesc> op0 = OperatorFactory.get
     (new scriptDesc("/bin/cat",
+        PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key,value"),
         PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "tkey,tvalue"),
-        PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key,value")),
+        TextRecordReader.class),
      op1);
 
     Operator<selectDesc> op4 = OperatorFactory.get(new selectDesc(
@@ -425,7 +427,8 @@
     Operator<scriptDesc> op0 = OperatorFactory.get
       (new scriptDesc("\'/bin/cat\'",
           PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "tkey,tvalue"),
-          PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "tkey,tvalue")),
+          PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "tkey,tvalue"),
+          TextRecordReader.class),
        op1);
 
     Operator<selectDesc> op4 = OperatorFactory.get(new selectDesc(

Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java?rev=807330&r1=807329&r2=807330&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java Mon Aug 24 18:23:19 2009
@@ -191,7 +191,7 @@
       // scriptOperator to echo the output of the select
       tableDesc scriptOutput = PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "a,b");
       tableDesc scriptInput  = PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "a,b");
-      scriptDesc sd = new scriptDesc("cat", scriptOutput, scriptInput);
+      scriptDesc sd = new scriptDesc("cat", scriptOutput, scriptInput, TextRecordReader.class);
       Operator<scriptDesc> sop = OperatorFactory.getAndMakeChild(sd, op);
 
       // Collect operator to observe the output of the script

Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input38.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input38.q?rev=807330&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input38.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input38.q Mon Aug 24 18:23:19 2009
@@ -0,0 +1,22 @@
+drop table dest1;
+CREATE TABLE dest1(key STRING, value STRING) STORED AS TEXTFILE;
+
+EXPLAIN
+FROM (
+  FROM src
+  SELECT TRANSFORM(src.key, src.value, 1+2, 3+4)
+         USING '/bin/cat'
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tmap.key, tmap.value;
+
+FROM (
+  FROM src
+  SELECT TRANSFORM(src.key, src.value, 1+2, 3+4)
+         USING '/bin/cat'
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tmap.key, tmap.value;
+
+
+SELECT dest1.* FROM dest1;
+
+drop table dest1;

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/script_error.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/script_error.q.out?rev=807330&r1=807329&r2=807330&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/script_error.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/script_error.q.out Mon Aug 24 18:23:19 2009
@@ -2,7 +2,7 @@
 SELECT TRANSFORM(src.key, src.value) USING '../data/scripts/error_script' AS (tkey, tvalue)
 FROM src
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) '../data/scripts/error_script' (TOK_ALIASLIST tkey tvalue))))))
+  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) TOK_SERDE '../data/scripts/error_script' TOK_SERDE TOK_RECORDREADER (TOK_ALIASLIST tkey tvalue))))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -42,5 +42,5 @@
 query: SELECT TRANSFORM(src.key, src.value) USING '../data/scripts/error_script' AS (tkey, tvalue)
 FROM src
 Input: default/src
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2091756058/10000
+Output: file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/tmp/472875737/10000
 FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input14.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input14.q.out?rev=807330&r1=807329&r2=807330&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input14.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input14.q.out Mon Aug 24 18:23:19 2009
@@ -8,7 +8,7 @@
 ) tmap
 INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) '/bin/cat' (TOK_ALIASLIST tkey tvalue)))) (TOK_CLUSTERBY (TOK_TABLE_OR_COL tkey)))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tkey)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tvalue))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL tmap) tkey) 100))))
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) TOK_SERDE '/bin/cat' TOK_SERDE TOK_RECORDREADER (TOK_ALIASLIST tkey tvalue)))) (TOK_CLUSTERBY (TOK_TABLE_OR_COL tkey)))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tkey)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tvalue))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL tmap) tkey) 100))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -98,7 +98,7 @@
 Output: default/dest1
 query: SELECT dest1.* FROM dest1
 Input: default/dest1
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/tmp/547922679/10000
+Output: file:/data/users/njain/hive4/hive4/build/ql/tmp/261540097/10000
 0	val_0
 0	val_0
 0	val_0

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input14_limit.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input14_limit.q.out?rev=807330&r1=807329&r2=807330&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input14_limit.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input14_limit.q.out Mon Aug 24 18:23:19 2009
@@ -8,7 +8,7 @@
 ) tmap
 INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) '/bin/cat' (TOK_ALIASLIST tkey tvalue)))) (TOK_CLUSTERBY (TOK_TABLE_OR_COL tkey)) (TOK_LIMIT 20))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tkey)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tvalue))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL tmap) tkey) 100))))
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) TOK_SERDE '/bin/cat' TOK_SERDE TOK_RECORDREADER (TOK_ALIASLIST tkey tvalue)))) (TOK_CLUSTERBY (TOK_TABLE_OR_COL tkey)) (TOK_LIMIT 20))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tkey)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tvalue))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL tmap) tkey) 100))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -61,7 +61,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_2/build/ql/tmp/293162762/10002 
+        file:/data/users/njain/hive4/hive4/build/ql/tmp/951832733/10002 
             Reduce Output Operator
               key expressions:
                     expr: _col0
@@ -128,7 +128,7 @@
 Output: default/dest1
 query: SELECT dest1.* FROM dest1
 Input: default/dest1
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_2/build/ql/tmp/181640407/10000
+Output: file:/data/users/njain/hive4/hive4/build/ql/tmp/248123483/10000
 0	val_0
 0	val_0
 0	val_0

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input17.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input17.q.out?rev=807330&r1=807329&r2=807330&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input17.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input17.q.out Mon Aug 24 18:23:19 2009
@@ -8,7 +8,7 @@
 ) tmap
 INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (+ (. (TOK_TABLE_OR_COL src_thrift) aint) ([ (. (TOK_TABLE_OR_COL src_thrift) lint) 0)) ([ (. (TOK_TABLE_OR_COL src_thrift) lintstring) 0)) '/bin/cat' (TOK_ALIASLIST tkey tvalue)))) (TOK_CLUSTERBY (TOK_TABLE_OR_COL tkey)))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tkey)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tvalue)))))
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (+ (. (TOK_TABLE_OR_COL src_thrift) aint) ([ (. (TOK_TABLE_OR_COL src_thrift) lint) 0)) ([ (. (TOK_TABLE_OR_COL src_thrift) lintstring) 0)) TOK_SERDE '/bin/cat' TOK_SERDE TOK_RECORDREADER (TOK_ALIASLIST tkey tvalue)))) (TOK_CLUSTERBY (TOK_TABLE_OR_COL tkey)))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tkey)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tvalue)))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -94,7 +94,7 @@
 Output: default/dest1
 query: SELECT dest1.* FROM dest1
 Input: default/dest1
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/2071608019/10000
+Output: file:/data/users/njain/hive4/hive4/build/ql/tmp/969741359/10000
 NULL	null
 -1461153966	{"myint":49,"mystring":"343","underscore_int":7}
 -1952710705	{"myint":25,"mystring":"125","underscore_int":5}

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input18.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input18.q.out?rev=807330&r1=807329&r2=807330&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input18.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input18.q.out Mon Aug 24 18:23:19 2009
@@ -8,7 +8,7 @@
 ) tmap
 INSERT OVERWRITE TABLE dest1 SELECT tmap.key, regexp_replace(tmap.value,'\t','+') WHERE tmap.key < 100
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value) (+ 1 2) (+ 3 4)) '/bin/cat'))) (TOK_CLUSTERBY (TOK_TABLE_OR_COL key)))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) key)) (TOK_SELEXPR (TOK_FUNCTION regexp_replace (. (TOK_TABLE_OR_COL tmap) value) '\t' '+'))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL tmap) key) 100))))
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value) (+ 1 2) (+ 3 4)) TOK_SERDE '/bin/cat' TOK_SERDE TOK_RECORDREADER))) (TOK_CLUSTERBY (TOK_TABLE_OR_COL key)))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) key)) (TOK_SELEXPR (TOK_FUNCTION regexp_replace (. (TOK_TABLE_OR_COL tmap) value) '\t' '+'))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL tmap) key) 100))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -102,7 +102,7 @@
 Output: default/dest1
 query: SELECT dest1.* FROM dest1
 Input: default/dest1
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_2/build/ql/tmp/1006587831/10000
+Output: file:/data/users/njain/hive4/hive4/build/ql/tmp/1142241698/10000
 0	val_0+3+7
 0	val_0+3+7
 0	val_0+3+7

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input20.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input20.q.out?rev=807330&r1=807329&r2=807330&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input20.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input20.q.out Mon Aug 24 18:23:19 2009
@@ -12,7 +12,7 @@
 USING '../data/scripts/input20_script'
 AS key, value
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) key)) 'cat'))) (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL key)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value))))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL tmap) key) (. (TOK_TABLE_OR_COL tmap) value)) '../data/scripts/input20_script' (TOK_ALIASLIST key value))))))
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) key)) TOK_SERDE 'cat' TOK_SERDE TOK_RECORDREADER))) (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL key)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value))))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL tmap) key) (. (TOK_TABLE_OR_COL tmap) value)) TOK_SERDE '../data/scripts/input20_script' TOK_SERDE TOK_RECORDREADER (TOK_ALIASLIST key value))))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -109,7 +109,7 @@
 Output: default/dest1
 query: SELECT * FROM dest1 SORT BY key, value
 Input: default/dest1
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/660141896/10000
+Output: file:/data/users/njain/hive4/hive4/build/ql/tmp/285599014/10000
 1	105_105
 1	10_10
 1	111_111

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input33.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input33.q.out?rev=807330&r1=807329&r2=807330&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input33.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input33.q.out Mon Aug 24 18:23:19 2009
@@ -12,7 +12,7 @@
 USING '../data/scripts/input20_script'
 AS (key STRING, value STRING)
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) key)) 'cat'))) (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL key)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value))))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL tmap) key) (. (TOK_TABLE_OR_COL tmap) value)) '../data/scripts/input20_script' (TOK_TABCOLLIST (TOK_TABCOL key TOK_STRING) (TOK_TABCOL value TOK_STRING)))))))
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) key)) TOK_SERDE 'cat' TOK_SERDE TOK_RECORDREADER))) (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL key)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value))))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL tmap) key) (. (TOK_TABLE_OR_COL tmap) value)) TOK_SERDE '../data/scripts/input20_script' TOK_SERDE TOK_RECORDREADER (TOK_TABCOLLIST (TOK_TABCOL key TOK_STRING) (TOK_TABCOL value TOK_STRING)))))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -109,7 +109,7 @@
 Output: default/dest1
 query: SELECT * FROM dest1 SORT BY key, value
 Input: default/dest1
-Output: file:/data/users/njain/hive3/hive3/build/ql/tmp/410502456/10000
+Output: file:/data/users/njain/hive4/hive4/build/ql/tmp/1305074908/10000
 1	105_105
 1	10_10
 1	111_111

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input34.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input34.q.out?rev=807330&r1=807329&r2=807330&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input34.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input34.q.out Mon Aug 24 18:23:19 2009
@@ -8,7 +8,7 @@
 ) tmap
 INSERT OVERWRITE TABLE dest1 SELECT tkey, tvalue
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) (TOK_SERDE (TOK_SERDENAME 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe')) '/bin/cat' (TOK_ALIASLIST tkey tvalue) (TOK_SERDE (TOK_SERDENAME 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'))))))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL tkey)) (TOK_SELEXPR (TOK_TABLE_OR_COL tvalue)))))
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) (TOK_SERDE (TOK_SERDENAME 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe')) '/bin/cat' (TOK_SERDE (TOK_SERDENAME 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe')) TOK_RECORDREADER (TOK_ALIASLIST tkey tvalue)))))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL tkey)) (TOK_SELEXPR (TOK_TABLE_OR_COL tvalue)))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -63,10 +63,10 @@
           Move Operator
             files:
                 hdfs directory: true
-                destination: file:/data/users/njain/hive3/hive3/build/ql/tmp/1093532122/10000
+                destination: file:/data/users/njain/hive4/hive4/build/ql/tmp/1318231678/10000
           Map Reduce
             Alias -> Map Operator Tree:
-              file:/data/users/njain/hive3/hive3/build/ql/tmp/754709366/10002 
+              file:/data/users/njain/hive4/hive4/build/ql/tmp/155692226/10002 
                   Reduce Output Operator
                     sort order: 
                     Map-reduce partition columns:
@@ -111,7 +111,7 @@
 Output: default/dest1
 query: SELECT dest1.* FROM dest1
 Input: default/dest1
-Output: file:/data/users/njain/hive3/hive3/build/ql/tmp/248092577/10000
+Output: file:/data/users/njain/hive4/hive4/build/ql/tmp/1296071512/10000
 238	val_238
 86	val_86
 311	val_311

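The input34 hunk above also shows where an explicit SerDe lands in the new child order: input SerDe, script, output SerDe, record reader, alias list. In surface syntax (reconstructed from the AST in that hunk) the query is:

    FROM (
      FROM src
      SELECT TRANSFORM(src.key, src.value)
             ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
             USING '/bin/cat'
             AS (tkey, tvalue)
             ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
    ) tmap
    INSERT OVERWRITE TABLE dest1 SELECT tkey, tvalue

The first ROW FORMAT governs what the script reads, the second what Hive reads back from it; with no RECORDREADER clause spelled out, the tree still records the bare TOK_RECORDREADER placeholder.
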
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input35.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input35.q.out?rev=807330&r1=807329&r2=807330&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input35.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input35.q.out Mon Aug 24 18:23:19 2009
@@ -8,7 +8,7 @@
 ) tmap
 INSERT OVERWRITE TABLE dest1 SELECT tkey, tvalue
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) (TOK_SERDE (TOK_SERDEPROPS (TOK_TABLEROWFORMATFIELD '\002'))) '/bin/cat' (TOK_ALIASLIST tkey tvalue) (TOK_SERDE (TOK_SERDEPROPS (TOK_TABLEROWFORMATFIELD '\002')))))))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL tkey)) (TOK_SELEXPR (TOK_TABLE_OR_COL tvalue)))))
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) (TOK_SERDE (TOK_SERDEPROPS (TOK_TABLEROWFORMATFIELD '\002'))) '/bin/cat' (TOK_SERDE (TOK_SERDEPROPS (TOK_TABLEROWFORMATFIELD '\002'))) TOK_RECORDREADER (TOK_ALIASLIST tkey tvalue)))))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL tkey)) (TOK_SELEXPR (TOK_TABLE_OR_COL tvalue)))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -63,10 +63,10 @@
           Move Operator
             files:
                 hdfs directory: true
-                destination: file:/data/users/njain/hive3/hive3/build/ql/tmp/525283863/10000
+                destination: file:/data/users/njain/hive4/hive4/build/ql/tmp/690413108/10000
           Map Reduce
             Alias -> Map Operator Tree:
-              file:/data/users/njain/hive3/hive3/build/ql/tmp/84195792/10002 
+              file:/data/users/njain/hive4/hive4/build/ql/tmp/1173839113/10002 
                   Reduce Output Operator
                     sort order: 
                     Map-reduce partition columns:
@@ -111,7 +111,7 @@
 Output: default/dest1
 query: SELECT dest1.* FROM dest1
 Input: default/dest1
-Output: file:/data/users/njain/hive3/hive3/build/ql/tmp/1714970451/10000
+Output: file:/data/users/njain/hive4/hive4/build/ql/tmp/1858771858/10000
 238	val_238
 86	val_86
 311	val_311

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input36.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input36.q.out?rev=807330&r1=807329&r2=807330&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input36.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input36.q.out Mon Aug 24 18:23:19 2009
@@ -8,7 +8,7 @@
 ) tmap
 INSERT OVERWRITE TABLE dest1 SELECT tkey, tvalue
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) (TOK_SERDE (TOK_SERDEPROPS (TOK_TABLEROWFORMATFIELD '\002'))) '/bin/cat' (TOK_ALIASLIST tkey tvalue) (TOK_SERDE (TOK_SERDEPROPS (TOK_TABLEROWFORMATFIELD '\003')))))))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL tkey)) (TOK_SELEXPR (TOK_TABLE_OR_COL tvalue)))))
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) (TOK_SERDE (TOK_SERDEPROPS (TOK_TABLEROWFORMATFIELD '\002'))) '/bin/cat' (TOK_SERDE (TOK_SERDEPROPS (TOK_TABLEROWFORMATFIELD '\003'))) TOK_RECORDREADER (TOK_ALIASLIST tkey tvalue)))))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL tkey)) (TOK_SELEXPR (TOK_TABLE_OR_COL tvalue)))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -63,10 +63,10 @@
           Move Operator
             files:
                 hdfs directory: true
-                destination: file:/data/users/njain/hive3/hive3/build/ql/tmp/1966676963/10000
+                destination: file:/data/users/njain/hive4/hive4/build/ql/tmp/1126551991/10000
           Map Reduce
             Alias -> Map Operator Tree:
-              file:/data/users/njain/hive3/hive3/build/ql/tmp/1028977168/10002 
+              file:/data/users/njain/hive4/hive4/build/ql/tmp/564377898/10002 
                   Reduce Output Operator
                     sort order: 
                     Map-reduce partition columns:
@@ -111,7 +111,7 @@
 Output: default/dest1
 query: SELECT dest1.* FROM dest1
 Input: default/dest1
-Output: file:/data/users/njain/hive3/hive3/build/ql/tmp/1073316238/10000
+Output: file:/data/users/njain/hive4/hive4/build/ql/tmp/7955520/10000
 NULL	NULL
 NULL	NULL
 NULL	NULL

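input35 and input36 exercise the DELIMITED variant of the same clauses. The two row formats are independent of each other, so input36 deliberately feeds the script '\002'-delimited rows while parsing its output on '\003' (reconstructed from the AST in the input36 hunk):

    FROM (
      FROM src
      SELECT TRANSFORM(src.key, src.value)
             ROW FORMAT DELIMITED FIELDS TERMINATED BY '\002'
             USING '/bin/cat'
             AS (tkey, tvalue)
             ROW FORMAT DELIMITED FIELDS TERMINATED BY '\003'
    ) tmap
    INSERT OVERWRITE TABLE dest1 SELECT tkey, tvalue

Since /bin/cat echoes the '\002'-delimited input unchanged, the '\003' split on the way back cannot recover the expected fields, which is presumably why input36's expected rows are all NULLs while input35, whose delimiters match, round-trips the data intact.
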
Added: hadoop/hive/trunk/ql/src/test/results/clientpositive/input38.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input38.q.out?rev=807330&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input38.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input38.q.out Mon Aug 24 18:23:19 2009
@@ -0,0 +1,611 @@
+query: drop table dest1
+query: CREATE TABLE dest1(key STRING, value STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM (
+  FROM src
+  SELECT TRANSFORM(src.key, src.value, 1+2, 3+4)
+         USING '/bin/cat'
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tmap.key, tmap.value
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value) (+ 1 2) (+ 3 4)) TOK_SERDE '/bin/cat' TOK_SERDE TOK_RECORDREADER))))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) value)))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-4 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-4
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        tmap:src 
+          TableScan
+            alias: src
+            Select Operator
+              expressions:
+                    expr: key
+                    type: string
+                    expr: value
+                    type: string
+                    expr: (1 + 2)
+                    type: int
+                    expr: (3 + 4)
+                    type: int
+              outputColumnNames: _col0, _col1, _col2, _col3
+              Transform Operator
+                command: /bin/cat
+                output info:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                Select Operator
+                  expressions:
+                        expr: key
+                        type: string
+                        expr: value
+                        type: string
+                  outputColumnNames: _col0, _col1
+                  File Output Operator
+                    compressed: false
+                    GlobalTableId: 1
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: dest1
+
+  Stage: Stage-4
+    Conditional Operator
+      list of dependent Tasks:
+          Move Operator
+            files:
+                hdfs directory: true
+                destination: file:/data/users/njain/hive4/hive4/build/ql/tmp/1215985021/10000
+          Map Reduce
+            Alias -> Map Operator Tree:
+              file:/data/users/njain/hive4/hive4/build/ql/tmp/986711911/10002 
+                  Reduce Output Operator
+                    sort order: 
+                    Map-reduce partition columns:
+                          expr: rand()
+                          type: double
+                    tag: -1
+                    value expressions:
+                          expr: key
+                          type: string
+                          expr: value
+                          type: string
+            Reduce Operator Tree:
+              Extract
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 0
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      name: dest1
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: dest1
+
+
+query: FROM (
+  FROM src
+  SELECT TRANSFORM(src.key, src.value, 1+2, 3+4)
+         USING '/bin/cat'
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tmap.key, tmap.value
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: file:/data/users/njain/hive4/hive4/build/ql/tmp/1895427421/10000
+238	val_238	3	7
+86	val_86	3	7
+311	val_311	3	7
+27	val_27	3	7
+165	val_165	3	7
+409	val_409	3	7
+255	val_255	3	7
+278	val_278	3	7
+98	val_98	3	7
+484	val_484	3	7
+265	val_265	3	7
+193	val_193	3	7
+401	val_401	3	7
+150	val_150	3	7
+273	val_273	3	7
+224	val_224	3	7
+369	val_369	3	7
+66	val_66	3	7
+128	val_128	3	7
+213	val_213	3	7
+146	val_146	3	7
+406	val_406	3	7
+429	val_429	3	7
+374	val_374	3	7
+152	val_152	3	7
+469	val_469	3	7
+145	val_145	3	7
+495	val_495	3	7
+37	val_37	3	7
+327	val_327	3	7
+281	val_281	3	7
+277	val_277	3	7
+209	val_209	3	7
+15	val_15	3	7
+82	val_82	3	7
+403	val_403	3	7
+166	val_166	3	7
+417	val_417	3	7
+430	val_430	3	7
+252	val_252	3	7
+292	val_292	3	7
+219	val_219	3	7
+287	val_287	3	7
+153	val_153	3	7
+193	val_193	3	7
+338	val_338	3	7
+446	val_446	3	7
+459	val_459	3	7
+394	val_394	3	7
+237	val_237	3	7
+482	val_482	3	7
+174	val_174	3	7
+413	val_413	3	7
+494	val_494	3	7
+207	val_207	3	7
+199	val_199	3	7
+466	val_466	3	7
+208	val_208	3	7
+174	val_174	3	7
+399	val_399	3	7
+396	val_396	3	7
+247	val_247	3	7
+417	val_417	3	7
+489	val_489	3	7
+162	val_162	3	7
+377	val_377	3	7
+397	val_397	3	7
+309	val_309	3	7
+365	val_365	3	7
+266	val_266	3	7
+439	val_439	3	7
+342	val_342	3	7
+367	val_367	3	7
+325	val_325	3	7
+167	val_167	3	7
+195	val_195	3	7
+475	val_475	3	7
+17	val_17	3	7
+113	val_113	3	7
+155	val_155	3	7
+203	val_203	3	7
+339	val_339	3	7
+0	val_0	3	7
+455	val_455	3	7
+128	val_128	3	7
+311	val_311	3	7
+316	val_316	3	7
+57	val_57	3	7
+302	val_302	3	7
+205	val_205	3	7
+149	val_149	3	7
+438	val_438	3	7
+345	val_345	3	7
+129	val_129	3	7
+170	val_170	3	7
+20	val_20	3	7
+489	val_489	3	7
+157	val_157	3	7
+378	val_378	3	7
+221	val_221	3	7
+92	val_92	3	7
+111	val_111	3	7
+47	val_47	3	7
+72	val_72	3	7
+4	val_4	3	7
+280	val_280	3	7
+35	val_35	3	7
+427	val_427	3	7
+277	val_277	3	7
+208	val_208	3	7
+356	val_356	3	7
+399	val_399	3	7
+169	val_169	3	7
+382	val_382	3	7
+498	val_498	3	7
+125	val_125	3	7
+386	val_386	3	7
+437	val_437	3	7
+469	val_469	3	7
+192	val_192	3	7
+286	val_286	3	7
+187	val_187	3	7
+176	val_176	3	7
+54	val_54	3	7
+459	val_459	3	7
+51	val_51	3	7
+138	val_138	3	7
+103	val_103	3	7
+239	val_239	3	7
+213	val_213	3	7
+216	val_216	3	7
+430	val_430	3	7
+278	val_278	3	7
+176	val_176	3	7
+289	val_289	3	7
+221	val_221	3	7
+65	val_65	3	7
+318	val_318	3	7
+332	val_332	3	7
+311	val_311	3	7
+275	val_275	3	7
+137	val_137	3	7
+241	val_241	3	7
+83	val_83	3	7
+333	val_333	3	7
+180	val_180	3	7
+284	val_284	3	7
+12	val_12	3	7
+230	val_230	3	7
+181	val_181	3	7
+67	val_67	3	7
+260	val_260	3	7
+404	val_404	3	7
+384	val_384	3	7
+489	val_489	3	7
+353	val_353	3	7
+373	val_373	3	7
+272	val_272	3	7
+138	val_138	3	7
+217	val_217	3	7
+84	val_84	3	7
+348	val_348	3	7
+466	val_466	3	7
+58	val_58	3	7
+8	val_8	3	7
+411	val_411	3	7
+230	val_230	3	7
+208	val_208	3	7
+348	val_348	3	7
+24	val_24	3	7
+463	val_463	3	7
+431	val_431	3	7
+179	val_179	3	7
+172	val_172	3	7
+42	val_42	3	7
+129	val_129	3	7
+158	val_158	3	7
+119	val_119	3	7
+496	val_496	3	7
+0	val_0	3	7
+322	val_322	3	7
+197	val_197	3	7
+468	val_468	3	7
+393	val_393	3	7
+454	val_454	3	7
+100	val_100	3	7
+298	val_298	3	7
+199	val_199	3	7
+191	val_191	3	7
+418	val_418	3	7
+96	val_96	3	7
+26	val_26	3	7
+165	val_165	3	7
+327	val_327	3	7
+230	val_230	3	7
+205	val_205	3	7
+120	val_120	3	7
+131	val_131	3	7
+51	val_51	3	7
+404	val_404	3	7
+43	val_43	3	7
+436	val_436	3	7
+156	val_156	3	7
+469	val_469	3	7
+468	val_468	3	7
+308	val_308	3	7
+95	val_95	3	7
+196	val_196	3	7
+288	val_288	3	7
+481	val_481	3	7
+457	val_457	3	7
+98	val_98	3	7
+282	val_282	3	7
+197	val_197	3	7
+187	val_187	3	7
+318	val_318	3	7
+318	val_318	3	7
+409	val_409	3	7
+470	val_470	3	7
+137	val_137	3	7
+369	val_369	3	7
+316	val_316	3	7
+169	val_169	3	7
+413	val_413	3	7
+85	val_85	3	7
+77	val_77	3	7
+0	val_0	3	7
+490	val_490	3	7
+87	val_87	3	7
+364	val_364	3	7
+179	val_179	3	7
+118	val_118	3	7
+134	val_134	3	7
+395	val_395	3	7
+282	val_282	3	7
+138	val_138	3	7
+238	val_238	3	7
+419	val_419	3	7
+15	val_15	3	7
+118	val_118	3	7
+72	val_72	3	7
+90	val_90	3	7
+307	val_307	3	7
+19	val_19	3	7
+435	val_435	3	7
+10	val_10	3	7
+277	val_277	3	7
+273	val_273	3	7
+306	val_306	3	7
+224	val_224	3	7
+309	val_309	3	7
+389	val_389	3	7
+327	val_327	3	7
+242	val_242	3	7
+369	val_369	3	7
+392	val_392	3	7
+272	val_272	3	7
+331	val_331	3	7
+401	val_401	3	7
+242	val_242	3	7
+452	val_452	3	7
+177	val_177	3	7
+226	val_226	3	7
+5	val_5	3	7
+497	val_497	3	7
+402	val_402	3	7
+396	val_396	3	7
+317	val_317	3	7
+395	val_395	3	7
+58	val_58	3	7
+35	val_35	3	7
+336	val_336	3	7
+95	val_95	3	7
+11	val_11	3	7
+168	val_168	3	7
+34	val_34	3	7
+229	val_229	3	7
+233	val_233	3	7
+143	val_143	3	7
+472	val_472	3	7
+322	val_322	3	7
+498	val_498	3	7
+160	val_160	3	7
+195	val_195	3	7
+42	val_42	3	7
+321	val_321	3	7
+430	val_430	3	7
+119	val_119	3	7
+489	val_489	3	7
+458	val_458	3	7
+78	val_78	3	7
+76	val_76	3	7
+41	val_41	3	7
+223	val_223	3	7
+492	val_492	3	7
+149	val_149	3	7
+449	val_449	3	7
+218	val_218	3	7
+228	val_228	3	7
+138	val_138	3	7
+453	val_453	3	7
+30	val_30	3	7
+209	val_209	3	7
+64	val_64	3	7
+468	val_468	3	7
+76	val_76	3	7
+74	val_74	3	7
+342	val_342	3	7
+69	val_69	3	7
+230	val_230	3	7
+33	val_33	3	7
+368	val_368	3	7
+103	val_103	3	7
+296	val_296	3	7
+113	val_113	3	7
+216	val_216	3	7
+367	val_367	3	7
+344	val_344	3	7
+167	val_167	3	7
+274	val_274	3	7
+219	val_219	3	7
+239	val_239	3	7
+485	val_485	3	7
+116	val_116	3	7
+223	val_223	3	7
+256	val_256	3	7
+263	val_263	3	7
+70	val_70	3	7
+487	val_487	3	7
+480	val_480	3	7
+401	val_401	3	7
+288	val_288	3	7
+191	val_191	3	7
+5	val_5	3	7
+244	val_244	3	7
+438	val_438	3	7
+128	val_128	3	7
+467	val_467	3	7
+432	val_432	3	7
+202	val_202	3	7
+316	val_316	3	7
+229	val_229	3	7
+469	val_469	3	7
+463	val_463	3	7
+280	val_280	3	7
+2	val_2	3	7
+35	val_35	3	7
+283	val_283	3	7
+331	val_331	3	7
+235	val_235	3	7
+80	val_80	3	7
+44	val_44	3	7
+193	val_193	3	7
+321	val_321	3	7
+335	val_335	3	7
+104	val_104	3	7
+466	val_466	3	7
+366	val_366	3	7
+175	val_175	3	7
+403	val_403	3	7
+483	val_483	3	7
+53	val_53	3	7
+105	val_105	3	7
+257	val_257	3	7
+406	val_406	3	7
+409	val_409	3	7
+190	val_190	3	7
+406	val_406	3	7
+401	val_401	3	7
+114	val_114	3	7
+258	val_258	3	7
+90	val_90	3	7
+203	val_203	3	7
+262	val_262	3	7
+348	val_348	3	7
+424	val_424	3	7
+12	val_12	3	7
+396	val_396	3	7
+201	val_201	3	7
+217	val_217	3	7
+164	val_164	3	7
+431	val_431	3	7
+454	val_454	3	7
+478	val_478	3	7
+298	val_298	3	7
+125	val_125	3	7
+431	val_431	3	7
+164	val_164	3	7
+424	val_424	3	7
+187	val_187	3	7
+382	val_382	3	7
+5	val_5	3	7
+70	val_70	3	7
+397	val_397	3	7
+480	val_480	3	7
+291	val_291	3	7
+24	val_24	3	7
+351	val_351	3	7
+255	val_255	3	7
+104	val_104	3	7
+70	val_70	3	7
+163	val_163	3	7
+438	val_438	3	7
+119	val_119	3	7
+414	val_414	3	7
+200	val_200	3	7
+491	val_491	3	7
+237	val_237	3	7
+439	val_439	3	7
+360	val_360	3	7
+248	val_248	3	7
+479	val_479	3	7
+305	val_305	3	7
+417	val_417	3	7
+199	val_199	3	7
+444	val_444	3	7
+120	val_120	3	7
+429	val_429	3	7
+169	val_169	3	7
+443	val_443	3	7
+323	val_323	3	7
+325	val_325	3	7
+277	val_277	3	7
+230	val_230	3	7
+478	val_478	3	7
+178	val_178	3	7
+468	val_468	3	7
+310	val_310	3	7
+317	val_317	3	7
+333	val_333	3	7
+493	val_493	3	7
+460	val_460	3	7
+207	val_207	3	7
+249	val_249	3	7
+265	val_265	3	7
+480	val_480	3	7
+83	val_83	3	7
+136	val_136	3	7
+353	val_353	3	7
+172	val_172	3	7
+214	val_214	3	7
+462	val_462	3	7
+233	val_233	3	7
+406	val_406	3	7
+133	val_133	3	7
+175	val_175	3	7
+189	val_189	3	7
+454	val_454	3	7
+375	val_375	3	7
+401	val_401	3	7
+421	val_421	3	7
+407	val_407	3	7
+384	val_384	3	7
+256	val_256	3	7
+26	val_26	3	7
+134	val_134	3	7
+67	val_67	3	7
+384	val_384	3	7
+379	val_379	3	7
+18	val_18	3	7
+462	val_462	3	7
+492	val_492	3	7
+100	val_100	3	7
+298	val_298	3	7
+9	val_9	3	7
+341	val_341	3	7
+498	val_498	3	7
+146	val_146	3	7
+458	val_458	3	7
+362	val_362	3	7
+186	val_186	3	7
+285	val_285	3	7
+348	val_348	3	7
+167	val_167	3	7
+18	val_18	3	7
+273	val_273	3	7
+183	val_183	3	7
+281	val_281	3	7
+344	val_344	3	7
+97	val_97	3	7
+469	val_469	3	7
+315	val_315	3	7
+84	val_84	3	7
+28	val_28	3	7
+37	val_37	3	7
+448	val_448	3	7
+152	val_152	3	7
+348	val_348	3	7
+307	val_307	3	7
+194	val_194	3	7
+414	val_414	3	7
+477	val_477	3	7
+222	val_222	3	7
+126	val_126	3	7
+90	val_90	3	7
+169	val_169	3	7
+403	val_403	3	7
+400	val_400	3	7
+200	val_200	3	7
+97	val_97	3	7
+query: drop table dest1

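The TOK_RECORDREADER slot is the hook for pluggable script record readers. A sketch of how the clause is meant to combine with the typed-bytes support added elsewhere in this commit; the SerDe and reader class names here are assumptions for illustration, not taken from these test files:

    FROM (
      FROM src
      SELECT TRANSFORM(src.key, src.value)
             ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'
             USING '/bin/cat'
             AS (tkey, tvalue)
             ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'
             RECORDREADER 'org.apache.hadoop.hive.ql.exec.TypedBytesRecordReader'
    ) tmap
    INSERT OVERWRITE TABLE dest1 SELECT tkey, tvalue

When no RECORDREADER is given, the default configured by hive.script.recordreader (a text record reader) applies, which is what all of the golden files in this batch exercise.
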
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input5.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input5.q.out?rev=807330&r1=807329&r2=807330&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input5.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input5.q.out Mon Aug 24 18:23:19 2009
@@ -8,7 +8,7 @@
 ) tmap
 INSERT OVERWRITE TABLE dest1 SELECT tmap.tkey, tmap.tvalue
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src_thrift) lint) (. (TOK_TABLE_OR_COL src_thrift) lintstring)) '/bin/cat' (TOK_ALIASLIST tkey tvalue)))) (TOK_CLUSTERBY (TOK_TABLE_OR_COL tkey)))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tkey)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tvalue)))))
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src_thrift) lint) (. (TOK_TABLE_OR_COL src_thrift) lintstring)) TOK_SERDE '/bin/cat' TOK_SERDE TOK_RECORDREADER (TOK_ALIASLIST tkey tvalue)))) (TOK_CLUSTERBY (TOK_TABLE_OR_COL tkey)))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tkey)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tvalue)))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -87,7 +87,7 @@
 Output: default/dest1
 query: SELECT dest1.* FROM dest1
 Input: default/dest1
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/120418612/10000
+Output: file:/data/users/njain/hive4/hive4/build/ql/tmp/212735197/10000
 [0,0,0]	[{"myint":0,"mystring":"0","underscore_int":0}]
 [1,2,3]	[{"myint":1,"mystring":"1","underscore_int":1}]
 [2,4,6]	[{"myint":4,"mystring":"8","underscore_int":2}]

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce1.q.out?rev=807330&r1=807329&r2=807330&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce1.q.out Mon Aug 24 18:23:19 2009
@@ -7,7 +7,7 @@
 DISTRIBUTE BY tvalue, tkey
 SORT BY ten, one
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (TOK_FUNCTION TOK_INT (/ (. (TOK_TABLE_OR_COL src) key) 10)) (TOK_FUNCTION TOK_INT (% (. (TOK_TABLE_OR_COL src) key) 10)) (. (TOK_TABLE_OR_COL src) value)) '/bin/cat' (TOK_ALIASLIST tkey ten one tvalue)))) (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL tvalue) (TOK_TABLE_OR_COL tkey)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL ten)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL one)))))
+  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (TOK_FUNCTION TOK_INT (/ (. (TOK_TABLE_OR_COL src) key) 10)) (TOK_FUNCTION TOK_INT (% (. (TOK_TABLE_OR_COL src) key) 10)) (. (TOK_TABLE_OR_COL src) value)) TOK_SERDE '/bin/cat' TOK_SERDE TOK_RECORDREADER (TOK_ALIASLIST tkey ten one tvalue)))) (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL tvalue) (TOK_TABLE_OR_COL tkey)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL ten)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL one)))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -101,7 +101,7 @@
 Output: default/dest1
 query: SELECT dest1.* FROM dest1
 Input: default/dest1
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/1594936617/10000
+Output: file:/data/users/njain/hive4/hive4/build/ql/tmp/1629612062/10000
 0	0	0	val_0
 0	0	0	val_0
 0	0	0	val_0

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce2.q.out?rev=807330&r1=807329&r2=807330&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce2.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce2.q.out Mon Aug 24 18:23:19 2009
@@ -6,7 +6,7 @@
 USING '/bin/cat' AS (tkey, ten, one, tvalue)
 DISTRIBUTE BY tvalue, tkey
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (TOK_FUNCTION TOK_INT (/ (. (TOK_TABLE_OR_COL src) key) 10)) (TOK_FUNCTION TOK_INT (% (. (TOK_TABLE_OR_COL src) key) 10)) (. (TOK_TABLE_OR_COL src) value)) '/bin/cat' (TOK_ALIASLIST tkey ten one tvalue)))) (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL tvalue) (TOK_TABLE_OR_COL tkey))))
+  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (TOK_FUNCTION TOK_INT (/ (. (TOK_TABLE_OR_COL src) key) 10)) (TOK_FUNCTION TOK_INT (% (. (TOK_TABLE_OR_COL src) key) 10)) (. (TOK_TABLE_OR_COL src) value)) TOK_SERDE '/bin/cat' TOK_SERDE TOK_RECORDREADER (TOK_ALIASLIST tkey ten one tvalue)))) (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL tvalue) (TOK_TABLE_OR_COL tkey))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -94,7 +94,7 @@
 Output: default/dest1
 query: SELECT * FROM (SELECT dest1.* FROM dest1 DISTRIBUTE BY key SORT BY key, ten, one, value) T
 Input: default/dest1
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_2/build/ql/tmp/593292888/10000
+Output: file:/data/users/njain/hive4/hive4/build/ql/tmp/1007603652/10000
 0	0	0	val_0
 0	0	0	val_0
 0	0	0	val_0