Posted to commits@hive.apache.org by gu...@apache.org on 2013/07/29 23:08:19 UTC

svn commit: r1508202 [13/48] - in /hive/branches/tez: ./ beeline/src/java/org/apache/hive/beeline/ cli/src/java/org/apache/hadoop/hive/cli/ common/src/java/org/apache/hadoop/hive/common/metrics/ common/src/java/org/apache/hadoop/hive/conf/ common/src/t...

Modified: hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java (original)
+++ hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java Mon Jul 29 21:08:03 2013
@@ -18,20 +18,20 @@
 
 package org.apache.hadoop.hive.ql.exec;
 
-import java.util.List;
-import java.util.LinkedList;
 import java.lang.reflect.Method;
+import java.util.LinkedList;
+import java.util.List;
 
 import junit.framework.TestCase;
 
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
-import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
-import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.IntWritable;
 
 public class TestFunctionRegistry extends TestCase {
 
@@ -41,10 +41,11 @@ public class TestFunctionRegistry extend
     public void one(IntWritable x, HiveDecimalWritable y) {}
     public void one(IntWritable x, DoubleWritable y) {}
     public void one(IntWritable x, IntWritable y) {}
+    public void mismatch(DateWritable x, HiveDecimalWritable y) {}
     public void mismatch(TimestampWritable x, HiveDecimalWritable y) {}
     public void mismatch(BytesWritable x, DoubleWritable y) {}
   }
-  
+
   @Override
   protected void setUp() {
   }
@@ -58,17 +59,18 @@ public class TestFunctionRegistry extend
     implicit(TypeInfoFactory.floatTypeInfo, TypeInfoFactory.decimalTypeInfo, true);
     implicit(TypeInfoFactory.doubleTypeInfo, TypeInfoFactory.decimalTypeInfo, true);
     implicit(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.decimalTypeInfo, true);
+    implicit(TypeInfoFactory.dateTypeInfo, TypeInfoFactory.decimalTypeInfo, false);
     implicit(TypeInfoFactory.timestampTypeInfo, TypeInfoFactory.decimalTypeInfo, false);
   }
 
-  private void verify(Class udf, String name, TypeInfo ta, TypeInfo tb, 
+  private void verify(Class udf, String name, TypeInfo ta, TypeInfo tb,
                       Class a, Class b, boolean throwException) {
     List<TypeInfo> args = new LinkedList<TypeInfo>();
     args.add(ta);
     args.add(tb);
 
     Method result = null;
-    
+
     try {
       result = FunctionRegistry.getMethodInternal(udf, name, false, args);
     } catch (UDFArgumentException e) {
@@ -116,13 +118,13 @@ public class TestFunctionRegistry extend
   }
 
   public void testCommonClass() {
-    common(TypeInfoFactory.intTypeInfo, TypeInfoFactory.decimalTypeInfo, 
+    common(TypeInfoFactory.intTypeInfo, TypeInfoFactory.decimalTypeInfo,
            TypeInfoFactory.decimalTypeInfo);
-    common(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.decimalTypeInfo, 
+    common(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.decimalTypeInfo,
            TypeInfoFactory.stringTypeInfo);
-    common(TypeInfoFactory.doubleTypeInfo, TypeInfoFactory.decimalTypeInfo, 
+    common(TypeInfoFactory.doubleTypeInfo, TypeInfoFactory.decimalTypeInfo,
            TypeInfoFactory.decimalTypeInfo);
-    common(TypeInfoFactory.doubleTypeInfo, TypeInfoFactory.stringTypeInfo, 
+    common(TypeInfoFactory.doubleTypeInfo, TypeInfoFactory.stringTypeInfo,
            TypeInfoFactory.stringTypeInfo);
   }
 
@@ -131,13 +133,13 @@ public class TestFunctionRegistry extend
   }
 
   public void testCommonClassComparison() {
-    comparison(TypeInfoFactory.intTypeInfo, TypeInfoFactory.decimalTypeInfo, 
+    comparison(TypeInfoFactory.intTypeInfo, TypeInfoFactory.decimalTypeInfo,
                TypeInfoFactory.decimalTypeInfo);
-    comparison(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.decimalTypeInfo, 
+    comparison(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.decimalTypeInfo,
                TypeInfoFactory.decimalTypeInfo);
-    comparison(TypeInfoFactory.doubleTypeInfo, TypeInfoFactory.decimalTypeInfo, 
+    comparison(TypeInfoFactory.doubleTypeInfo, TypeInfoFactory.decimalTypeInfo,
                TypeInfoFactory.decimalTypeInfo);
-    comparison(TypeInfoFactory.doubleTypeInfo, TypeInfoFactory.stringTypeInfo, 
+    comparison(TypeInfoFactory.doubleTypeInfo, TypeInfoFactory.stringTypeInfo,
                TypeInfoFactory.doubleTypeInfo);
   }
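
For reference, a hedged reconstruction of the implicit(...) helper exercised in the conversion checks above (the helper body itself lies outside this hunk); it assumes the helper is a one-line wrapper over FunctionRegistry's implicit-conversion check with the implicitConvertable(TypeInfo, TypeInfo) signature of this era:

    // Hypothetical reconstruction -- both the wrapper shape and the
    // FunctionRegistry.implicitConvertable(from, to) call are assumptions.
    private void implicit(TypeInfo a, TypeInfo b, boolean convertible) {
      assertEquals(convertible, FunctionRegistry.implicitConvertable(a, b));
    }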
 

Modified: hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java (original)
+++ hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java Mon Jul 29 21:08:03 2013
@@ -345,13 +345,13 @@ public class TestOperators extends TestC
 
       // initialize mapredWork
       MapredWork mrwork = new MapredWork();
-      mrwork.setPathToAliases(pathToAliases);
-      mrwork.setPathToPartitionInfo(pathToPartitionInfo);
-      mrwork.setAliasToWork(aliasToWork);
+      mrwork.getMapWork().setPathToAliases(pathToAliases);
+      mrwork.getMapWork().setPathToPartitionInfo(pathToPartitionInfo);
+      mrwork.getMapWork().setAliasToWork(aliasToWork);
 
       // get map operator and initialize it
       MapOperator mo = new MapOperator();
-      mo.initializeAsRoot(hconf, mrwork);
+      mo.initializeAsRoot(hconf, mrwork.getMapWork());
 
       Text tw = new Text();
       InspectableObject io1 = new InspectableObject();
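
A minimal sketch of the accessor change the hunk above applies, assuming the tez-branch MapredWork now nests the map-side plan in a MapWork object:

    // Map-side setters have moved from MapredWork onto the nested MapWork.
    MapredWork mrwork = new MapredWork();
    MapWork mapWork = mrwork.getMapWork();
    mapWork.setPathToAliases(pathToAliases);
    mapWork.setPathToPartitionInfo(pathToPartitionInfo);
    mapWork.setAliasToWork(aliasToWork);
    // MapOperator is likewise initialized from the MapWork, not the MapredWork.
    mo.initializeAsRoot(hconf, mapWork);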

Modified: hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java (original)
+++ hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java Mon Jul 29 21:08:03 2013
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hive.ql.exec;
 
 import java.io.ByteArrayOutputStream;
+import java.io.File;
 import java.util.ArrayList;
 import java.util.LinkedHashMap;
 
@@ -74,22 +75,23 @@ public class TestPlan extends TestCase {
       ao.put("a", op);
 
       MapredWork mrwork = new MapredWork();
-      mrwork.setPathToAliases(pa);
-      mrwork.setPathToPartitionInfo(pt);
-      mrwork.setAliasToWork(ao);
+      mrwork.getMapWork().setPathToAliases(pa);
+      mrwork.getMapWork().setPathToPartitionInfo(pt);
+      mrwork.getMapWork().setAliasToWork(ao);
 
       // serialize the configuration once ..
       ByteArrayOutputStream baos = new ByteArrayOutputStream();
-      Utilities.serializeMapRedWork(mrwork, baos);
+      Utilities.serializeObject(mrwork, baos);
       baos.close();
       String v1 = baos.toString();
 
       // store into configuration
       JobConf job = new JobConf(TestPlan.class);
       job.set("fs.default.name", "file:///");
-      Utilities.setMapRedWork(job, mrwork,"/tmp/" + System.getProperty("user.name") + "/hive");
+      Utilities.setMapRedWork(job, mrwork, System.getProperty("java.io.tmpdir") + File.separator +
+        System.getProperty("user.name") + File.separator + "hive");
       MapredWork mrwork2 = Utilities.getMapRedWork(job);
-      Utilities.clearMapRedWork(job);
+      Utilities.clearWork(job);
 
       // over here we should have some checks of the deserialized object against
       // the original object
@@ -97,7 +99,7 @@ public class TestPlan extends TestCase {
 
       // serialize again
       baos.reset();
-      Utilities.serializeMapRedWork(mrwork2, baos);
+      Utilities.serializeObject(mrwork2, baos);
       baos.close();
 
       // verify that the two are equal
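
The round trip above boils down to serialize, deserialize, reserialize, compare; a compact sketch of that pattern, assuming Utilities.serializeObject produces a deterministic textual form:

    // Serialize the original plan, round-trip it through the JobConf,
    // then serialize the deserialized copy and compare the two strings.
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    Utilities.serializeObject(mrwork, baos);
    String v1 = baos.toString();
    baos.reset();
    Utilities.serializeObject(mrwork2, baos);
    assertEquals(v1, baos.toString());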

Modified: hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java (original)
+++ hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java Mon Jul 29 21:08:03 2013
@@ -18,11 +18,18 @@
 
 package org.apache.hadoop.hive.ql.exec;
 
+import java.sql.Date;
+import java.sql.Timestamp;
+
 import static org.apache.hadoop.hive.ql.exec.Utilities.getFileExtension;
 
 import junit.framework.TestCase;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
+import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.mapred.JobConf;
 
 public class TestUtilities extends TestCase {
@@ -53,4 +60,14 @@ public class TestUtilities extends TestC
     assertEquals("Custom extension for uncompressed text format", extension,
         getFileExtension(jc, true, new HiveIgnoreKeyTextOutputFormat()));
   }
+
+  public void testSerializeTimestamp() {
+    Timestamp ts = new Timestamp(1374554702000L);
+    ts.setNanos(123456);
+    ExprNodeConstantDesc constant = new ExprNodeConstantDesc(
+        TypeInfoFactory.timestampTypeInfo, ts);
+    String serialized = Utilities.serializeExpression(constant);
+    ExprNodeDesc deserialized = Utilities.deserializeExpression(serialized, new Configuration());
+    assertEquals(constant.getExprString(), deserialized.getExprString());
+  }
 }
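
One detail worth noting in testSerializeTimestamp: java.sql.Timestamp keeps integral seconds plus a separate nanoseconds field, and setNanos() replaces the whole fractional second. A small worked example:

    Timestamp ts = new Timestamp(1374554702000L); // fractional second is .000
    ts.setNanos(123456);                          // fraction becomes 0.000123456 s
    // getTime() still reports 1374554702000L: 123456 ns rounds to 0 ms.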

Modified: hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/hooks/VerifyHiveSortedInputFormatUsedHook.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/hooks/VerifyHiveSortedInputFormatUsedHook.java?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/hooks/VerifyHiveSortedInputFormatUsedHook.java (original)
+++ hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/hooks/VerifyHiveSortedInputFormatUsedHook.java Mon Jul 29 21:08:03 2013
@@ -38,7 +38,7 @@ public class VerifyHiveSortedInputFormat
       for (Task<? extends Serializable> rootTask : rootTasks) {
         if (rootTask.getWork() instanceof MapredWork) {
           Assert.assertTrue("The root map reduce task's input was not marked as sorted.",
-              ((MapredWork)rootTask.getWork()).isInputFormatSorted());
+              ((MapredWork)rootTask.getWork()).getMapWork().isInputFormatSorted());
         }
       }
     }

Modified: hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/PerformTestRCFileAndSeqFile.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/PerformTestRCFileAndSeqFile.java?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/PerformTestRCFileAndSeqFile.java (original)
+++ hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/PerformTestRCFileAndSeqFile.java Mon Jul 29 21:08:03 2013
@@ -20,8 +20,6 @@ package org.apache.hadoop.hive.ql.io;
 import java.io.IOException;
 import java.util.Random;
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -34,12 +32,13 @@ import org.apache.hadoop.io.SequenceFile
 import org.apache.hadoop.io.SequenceFile.CompressionType;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.DefaultCodec;
+import static org.junit.Assert.*;
 
 /**
  * PerformTestRCFileAndSeqFile.
  *
  */
-public class PerformTestRCFileAndSeqFile extends TestCase {
+public class PerformTestRCFileAndSeqFile {
 
   private final Configuration conf = new Configuration();
 

Modified: hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/TestHiveInputOutputBuffer.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/TestHiveInputOutputBuffer.java?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/TestHiveInputOutputBuffer.java (original)
+++ hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/TestHiveInputOutputBuffer.java Mon Jul 29 21:08:03 2013
@@ -17,7 +17,13 @@
  */
 package org.apache.hadoop.hive.ql.io;
 
+import static org.junit.Assert.assertArrayEquals;
+
+import java.io.DataOutput;
 import java.io.IOException;
+import java.util.Random;
+
+import org.junit.Test;
 
 import junit.framework.TestCase;
 
@@ -27,17 +33,196 @@ import junit.framework.TestCase;
  */
 public class TestHiveInputOutputBuffer extends TestCase {
 
+  private static final int numCases = 14; 
+  
+  private static final String asciiLine1 = "Foo 12345 moo";
+  private static final String asciiLine2 = "Line two";
+  private static final String asciiString = asciiLine1 + "\n" + asciiLine2 + "\r\n";
+
   public void testReadAndWrite() throws IOException {
     String testString = "test_hive_input_output_number_0";
     byte[] string_bytes = testString.getBytes();
     NonSyncDataInputBuffer inBuffer = new NonSyncDataInputBuffer();
     NonSyncDataOutputBuffer outBuffer = new NonSyncDataOutputBuffer();
-    outBuffer.write(string_bytes);
-    inBuffer.reset(outBuffer.getData(), 0, outBuffer.getLength());
-    byte[] readBytes = new byte[string_bytes.length];
-    inBuffer.read(readBytes);
-    String readString = new String(readBytes);
-    assertEquals("Field testReadAndWrite()", readString, testString);
+    try {
+      outBuffer.write(string_bytes);
+      inBuffer.reset(outBuffer.getData(), 0, outBuffer.getLength());
+      byte[] readBytes = new byte[string_bytes.length];
+      inBuffer.read(readBytes);
+      String readString = new String(readBytes);
+      assertEquals("Field testReadAndWrite()", readString, testString);
+    } finally {
+      inBuffer.close();
+      outBuffer.close();
+    }
   }
 
+  @SuppressWarnings("deprecation")
+  private static void readJunk(NonSyncDataInputBuffer in, Random r, long seed, int iter) 
+      throws IOException {
+    r.setSeed(seed);
+    for (int i = 0; i < iter; ++i) {
+      switch (r.nextInt(numCases)) {
+        case 0:
+          assertEquals((byte)(r.nextInt() & 0xFF), in.readByte()); break;
+        case 1:
+          assertEquals((short)(r.nextInt() & 0xFFFF), in.readShort()); break;
+        case 2:
+          assertEquals(r.nextInt(), in.readInt()); break;
+        case 3:
+          assertEquals(r.nextLong(), in.readLong()); break;
+        case 4:
+          assertEquals(Double.doubleToLongBits(r.nextDouble()),
+                       Double.doubleToLongBits(in.readDouble())); break;
+        case 5:
+          assertEquals(Float.floatToIntBits(r.nextFloat()),
+                       Float.floatToIntBits(in.readFloat())); break;
+        case 6:
+          int len = r.nextInt(1024);
+          // 1 (test #readFully(3)):
+          final byte[] vb = new byte[len];
+          r.nextBytes(vb);
+          final byte[] b = new byte[len];
+          in.readFully(b, 0, len);
+          assertArrayEquals(vb, b);
+          // 2 (test #read(3)):
+          r.nextBytes(vb);
+          in.read(b, 0, len);
+          assertArrayEquals(vb, b);
+          // 3 (test #readFully(1)):
+          r.nextBytes(vb);
+          in.readFully(b);
+          assertArrayEquals(vb, b);
+          break;
+        case 7:
+          assertEquals(r.nextBoolean(), in.readBoolean());
+          break;
+        case 8:
+          assertEquals((char)r.nextInt(), in.readChar());
+          break;
+        case 9:
+          int actualUB = in.readUnsignedByte();
+          assertTrue(actualUB >= 0);
+          assertTrue(actualUB <= 255);
+          assertEquals(r.nextInt() & 0xFF, actualUB);
+          break;
+        case 10:
+          int actualUS = in.readUnsignedShort();
+          assertTrue(actualUS >= 0);
+          assertTrue(actualUS <= 0xFFFF);
+          assertEquals(r.nextInt() & 0xFFFF, actualUS);
+          break;
+        case 11:
+          String expectedString1 = composeString(1024, r);
+          assertEquals(expectedString1, in.readUTF());
+          String expectedString2 = composeString(1024, r);
+          assertEquals(expectedString2, NonSyncDataInputBuffer.readUTF(in));
+          break;
+        case 12:
+          assertEquals(asciiLine1, in.readLine());
+          assertEquals(asciiLine2, in.readLine());
+          break;
+        case 13:
+          in.skipBytes(8);
+          r.nextLong(); // ignore
+          assertEquals(r.nextLong(), in.readLong());
+          break;
+      }
+    }
+  }
+  
+  private static void writeJunk(DataOutput out, Random r, long seed, int iter)
+      throws IOException  {
+    r.setSeed(seed);
+    for (int i = 0; i < iter; ++i) {
+      switch (r.nextInt(numCases)) {
+        case 0: out.writeByte(r.nextInt()); break;
+        case 1: out.writeShort((short)(r.nextInt() & 0xFFFF)); break;
+        case 2: out.writeInt(r.nextInt()); break;
+        case 3: out.writeLong(r.nextLong()); break;
+        case 4: out.writeDouble(r.nextDouble()); break;
+        case 5: out.writeFloat(r.nextFloat()); break;
+        case 6:
+          byte[] b = new byte[r.nextInt(1024)];
+          // 1:
+          r.nextBytes(b);
+          out.write(b);
+          // 2:
+          r.nextBytes(b);
+          out.write(b);
+          // 3:
+          r.nextBytes(b);
+          out.write(b);
+          break;
+        case 7:
+          out.writeBoolean(r.nextBoolean());
+          break;
+        case 8:
+          out.writeChar((char)r.nextInt());
+          break;
+        case 9:
+          out.writeByte((byte)r.nextInt());
+          break;
+        case 10:
+          out.writeShort((short)r.nextInt());
+          break;
+        case 11:
+          String string = composeString(1024, r);
+          out.writeUTF(string);
+          String string2 = composeString(1024, r);
+          out.writeUTF(string2);
+          break;
+        case 12:
+          byte[] bb = asciiString.getBytes("UTF-8");
+          out.write(bb);
+          break;
+        case 13:
+          out.writeLong(r.nextLong());
+          out.writeLong(r.nextLong());
+          break;
+      }
+    }
+  }
+
+  private static String composeString(int len, Random r) {
+    char[] cc = new char[len];
+    char ch;
+    for (int i = 0; i<len; i++) {
+      do {
+        ch = (char)r.nextInt();
+      } while (!Character.isDefined(ch) 
+          || Character.isHighSurrogate(ch)
+          || Character.isLowSurrogate(ch));
+      cc[i] = ch;
+    }
+    return new String(cc);
+  }
+  
+  /**
+   * Tests methods of {@link NonSyncDataInputBuffer}.
+   * @throws IOException
+   */
+  @Test
+  public void testBaseBuffers() throws IOException {
+    NonSyncDataOutputBuffer dob = new NonSyncDataOutputBuffer();
+    final Random r = new Random();
+    final long seed = 0x0123456789ABCDEFL; // hardcoded for reproducibility.
+    r.setSeed(seed);
+    System.out.println("SEED: " + seed);
+    
+    writeJunk(dob, r, seed, 1000);
+    NonSyncDataInputBuffer dib = new NonSyncDataInputBuffer();
+    dib.reset(dob.getData(), 0, dob.getLength());
+    assertEquals(0, dib.getPosition());
+    assertEquals(dob.getLength(), dib.getLength());
+    readJunk(dib, r, seed, 1000);
+
+    dob.reset();
+    writeJunk(dob, r, seed, 1000);
+    dib.reset(dob.getData(), dob.getLength());
+    assertEquals(0, dib.getPosition());
+    assertEquals(dob.getLength(), dib.getLength());
+    readJunk(dib, r, seed, 1000);
+  }
+  
 }
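
The writeJunk/readJunk pair above relies on java.util.Random being fully deterministic for a given seed: both methods reset the generator to the same seed, so the reader regenerates exactly the case choices and values the writer produced. A minimal illustration of the technique:

    // Writer pass: pick a case and a value.
    Random r = new Random();
    long seed = 0x0123456789ABCDEFL;
    r.setSeed(seed);
    int chosenCase = r.nextInt(14);
    long written = r.nextLong();
    // Reader pass: rewinding to the same seed replays the identical sequence.
    r.setSeed(seed);
    assert chosenCase == r.nextInt(14);
    assert written == r.nextLong();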

Modified: hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java (original)
+++ hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java Mon Jul 29 21:08:03 2013
@@ -23,11 +23,13 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.RandomAccessFile;
 import java.io.UnsupportedEncodingException;
+import java.util.Arrays;
 import java.util.List;
 import java.util.Properties;
 import java.util.Random;
 
 import junit.framework.TestCase;
+import static org.junit.Assert.*;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -219,6 +221,94 @@ public class TestRCFile extends TestCase
 
     reader.close();
   }
+  
+  /**
+   * Tests {@link RCFile.Reader#getColumn(int, BytesRefArrayWritable) } method.
+   * @throws IOException
+   */
+  public void testGetColumn() throws IOException {
+    fs.delete(file, true);
+
+    RCFileOutputFormat.setColumnNumber(conf, expectedFieldsData.length);
+    RCFile.Writer writer =
+      new RCFile.Writer(fs, conf, file, null,
+                        RCFile.createMetadata(new Text("apple"),
+                                              new Text("block"),
+                                              new Text("cat"),
+                                              new Text("dog")),
+                        new DefaultCodec());
+    
+    byte[][] record_1 = {
+        "123".getBytes("UTF-8"), 
+        "456".getBytes("UTF-8"),
+        "789".getBytes("UTF-8"), 
+        "1000".getBytes("UTF-8"),
+        "5.3".getBytes("UTF-8"), 
+        "hive and hadoop".getBytes("UTF-8"),
+        new byte[0], 
+        "NULL".getBytes("UTF-8") };
+    byte[][] record_2 = {
+        "100".getBytes("UTF-8"), 
+        "200".getBytes("UTF-8"),
+        "123".getBytes("UTF-8"), 
+        "1000".getBytes("UTF-8"),
+        "5.3".getBytes("UTF-8"), 
+        "hive and hadoop".getBytes("UTF-8"),
+        new byte[0], 
+        "NULL".getBytes("UTF-8")};
+    
+    BytesRefArrayWritable bytes = new BytesRefArrayWritable(record_1.length);
+    for (int i = 0; i < record_1.length; i++) {
+      BytesRefWritable cu = new BytesRefWritable(record_1[i], 0,
+          record_1[i].length);
+      bytes.set(i, cu);
+    }
+    writer.append(bytes);
+    bytes.clear();
+    for (int i = 0; i < record_2.length; i++) {
+      BytesRefWritable cu = new BytesRefWritable(record_2[i], 0,
+          record_2[i].length);
+      bytes.set(i, cu);
+    }
+    writer.append(bytes);
+    writer.close();
+
+    RCFile.Reader reader = new RCFile.Reader(fs, file, conf);
+    
+    LongWritable rowID = new LongWritable();
+    assertTrue(reader.next(rowID));
+    assertEquals(rowID.get(), 0L);
+    
+    assertTrue(reader.next(rowID));
+    assertEquals(rowID.get(), 1L);
+    
+    BytesRefArrayWritable result = null;
+    BytesRefWritable brw;
+    for (int col=0; col < 8; col++) {
+      BytesRefArrayWritable result2 = reader.getColumn(col, result);
+      if (result == null) {
+        assertNotNull(result2);
+        result = result2;
+      } else {
+        // #getColumn(2) should return the instance passed in: 
+        assertSame(result2, result);
+      }
+      // each column has height of 2: 
+      assertEquals(2, result.size());
+      for (int row=0; row<result.size(); row++) {
+        brw = result.get(row);
+        int start = brw.getStart();
+        int len = brw.getLength();
+        byte[] actualData = Arrays.copyOfRange(brw.getData(), start, start + len);
+        byte[] expectedData = (row == 0) ? record_1[col] : record_2[col];
+        assertArrayEquals("col="+col+" : row="+row,  expectedData, actualData);
+      }
+      
+      result.clear();
+    }
+    
+    reader.close();
+  }
 
   public void testReadCorruptFile() throws IOException, SerDeException {
     fs.delete(file, true);
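
The new testGetColumn pins down the reuse contract of RCFile.Reader#getColumn that its assertions state: passing null makes the reader allocate a fresh BytesRefArrayWritable, while passing a previous result back in refills that same instance. A short sketch of the contract:

    // First call: no buffer supplied, so the reader allocates one.
    BytesRefArrayWritable col = reader.getColumn(0, null);
    // Subsequent calls: the supplied buffer is refilled and returned.
    BytesRefArrayWritable same = reader.getColumn(1, col);
    assert same == col;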

Modified: hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java (original)
+++ hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java Mon Jul 29 21:08:03 2013
@@ -20,9 +20,6 @@ package org.apache.hadoop.hive.ql.io;
 import java.io.File;
 import java.io.IOException;
 import java.io.OutputStreamWriter;
-import java.io.Serializable;
-import java.net.URL;
-import java.net.URLClassLoader;
 import java.util.ArrayList;
 import java.util.List;
 
@@ -35,20 +32,12 @@ import org.apache.hadoop.fs.ContentSumma
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.HiveMetaStore;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.QueryPlan;
-import org.apache.hadoop.hive.ql.exec.ExecDriver;
-import org.apache.hadoop.hive.ql.exec.MapRedTask;
-import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.parse.ParseDriver;
-import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.ql.plan.MapredWork;
+import org.apache.hadoop.hive.ql.exec.mr.ExecDriver;
+import org.apache.hadoop.hive.ql.exec.mr.MapRedTask;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
@@ -57,7 +46,6 @@ import org.apache.hadoop.mapred.InputSpl
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.RecordReader;
 import org.apache.hadoop.mapred.Reporter;
-import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hadoop.util.ReflectionUtils;
 
 /**
@@ -178,12 +166,15 @@ public class TestSymlinkTextInputFormat 
       
       QueryPlan plan = drv.getPlan();
       MapRedTask selectTask = (MapRedTask)plan.getRootTasks().get(0);
-      
-      ExecDriver.addInputPaths(newJob, selectTask.getWork(), emptyScratchDir.toString(), ctx);
+
+      ExecDriver.addInputPaths(newJob, selectTask.getWork().getMapWork(), emptyScratchDir.toString(), ctx);
       Utilities.setMapRedWork(newJob, selectTask.getWork(), ctx.getMRTmpFileURI());
       
       CombineHiveInputFormat combineInputFormat = ReflectionUtils.newInstance(
           CombineHiveInputFormat.class, newJob);
+      
+      combineInputFormat.validateInput(newJob);
+      
       InputSplit[] retSplits = combineInputFormat.getSplits(newJob, 1);
       assertEquals(1, retSplits.length);
     } catch (Exception e) {

Modified: hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestFileDump.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestFileDump.java?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestFileDump.java (original)
+++ hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestFileDump.java Mon Jul 29 21:08:03 2013
@@ -43,7 +43,7 @@ public class TestFileDump {
   Path workDir = new Path(System.getProperty("test.tmp.dir",
       "target" + File.separator + "test" + File.separator + "tmp"));
   Path resourceDir = new Path(System.getProperty("test.build.resources",
-      "src" + File.separator + "test" + File.separator + "resources"));
+      "ql" + File.separator + "src" + File.separator + "test" + File.separator + "resources"));
 
   Configuration conf;
   FileSystem fs;

Modified: hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestEmbeddedLockManager.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestEmbeddedLockManager.java?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestEmbeddedLockManager.java (original)
+++ hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestEmbeddedLockManager.java Mon Jul 29 21:08:03 2013
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hive.ql.lockmgr;
 
 import junit.framework.TestCase;
+
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject.HiveLockObjectData;
 import org.junit.Assert;

Modified: hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/DummyContextUDF.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/DummyContextUDF.java?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/DummyContextUDF.java (original)
+++ hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/DummyContextUDF.java Mon Jul 29 21:08:03 2013
@@ -38,8 +38,7 @@ public class DummyContextUDF extends Gen
 
   public Object evaluate(DeferredObject[] arguments) throws HiveException {
     Reporter reporter = context.getReporter();
-    Counters.Counter counter = reporter.getCounter(
-        "org.apache.hadoop.mapred.Task$Counter", "MAP_INPUT_RECORDS");
+    Counters.Counter counter = reporter.getCounter("org.apache.hadoop.mapred.Task$Counter", "MAP_INPUT_RECORDS");
     result.set(counter.getValue());
     return result;
   }
@@ -49,7 +48,7 @@ public class DummyContextUDF extends Gen
   }
 
   @Override
-  public void configure(MapredContext context) {
+    public void configure(MapredContext context) {
     this.context = context;
   }
 }

Modified: hive/branches/tez/ql/src/test/queries/clientnegative/invalid_t_alter1.q
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/queries/clientnegative/invalid_t_alter1.q?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/queries/clientnegative/invalid_t_alter1.q (original)
+++ hive/branches/tez/ql/src/test/queries/clientnegative/invalid_t_alter1.q Mon Jul 29 21:08:03 2013
@@ -1,2 +1,2 @@
 CREATE TABLE alter_test (d STRING);
-ALTER TABLE alter_test CHANGE d d DATE;
+ALTER TABLE alter_test CHANGE d d DATETIME;

Modified: hive/branches/tez/ql/src/test/queries/clientnegative/invalid_t_alter2.q
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/queries/clientnegative/invalid_t_alter2.q?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/queries/clientnegative/invalid_t_alter2.q (original)
+++ hive/branches/tez/ql/src/test/queries/clientnegative/invalid_t_alter2.q Mon Jul 29 21:08:03 2013
@@ -1,2 +1,2 @@
 CREATE TABLE alter_test (d STRING);
-ALTER TABLE alter_test ADD COLUMNS (ds DATE);
+ALTER TABLE alter_test ADD COLUMNS (ds DATETIME);

Modified: hive/branches/tez/ql/src/test/queries/clientnegative/invalid_t_transform.q
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/queries/clientnegative/invalid_t_transform.q?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/queries/clientnegative/invalid_t_transform.q (original)
+++ hive/branches/tez/ql/src/test/queries/clientnegative/invalid_t_transform.q Mon Jul 29 21:08:03 2013
@@ -1 +1 @@
-SELECT TRANSFORM(*) USING 'cat' AS (key DATE) FROM src;
+SELECT TRANSFORM(*) USING 'cat' AS (key DATETIME) FROM src;

Modified: hive/branches/tez/ql/src/test/queries/clientnegative/serde_regex.q
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/queries/clientnegative/serde_regex.q?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/queries/clientnegative/serde_regex.q (original)
+++ hive/branches/tez/ql/src/test/queries/clientnegative/serde_regex.q Mon Jul 29 21:08:03 2013
@@ -1,5 +1,5 @@
 USE default;
---  This should fail because Regex SerDe doesn't support TIMESTAMP, STRUCT
+--  This should fail because Regex SerDe doesn't support STRUCT
 CREATE TABLE serde_regex(
   host STRING,
   identity STRING,

Modified: hive/branches/tez/ql/src/test/queries/clientpositive/auto_sortmerge_join_6.q
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/queries/clientpositive/auto_sortmerge_join_6.q?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/queries/clientpositive/auto_sortmerge_join_6.q (original)
+++ hive/branches/tez/ql/src/test/queries/clientpositive/auto_sortmerge_join_6.q Mon Jul 29 21:08:03 2013
@@ -19,10 +19,29 @@ set hive.auto.convert.join=true;
 set hive.auto.convert.join.noconditionaltask=true;
 set hive.auto.convert.join.noconditionaltask.size=200;
 set hive.auto.convert.sortmerge.join.to.mapjoin=false;
+
 -- A SMB join is being followed by a regular join on a non-bucketed table on a different key
+
+-- The three tests below are all the same query with different aliases, which changes the dispatch order of GenMapRedWalker
+-- This depends on the iteration order of HashMap, so it can be meaningless on non-Sun JDKs
+-- b = TS[0]-OP[13]-MAPJOIN[11]-RS[6]-JOIN[8]-SEL[9]-FS[10]
+-- c = TS[1]-RS[7]-JOIN[8]
+-- a = TS[2]-MAPJOIN[11]
 explain select count(*) FROM tbl1 a JOIN tbl2 b ON a.key = b.key join src c on c.value = a.value;
 select count(*) FROM tbl1 a JOIN tbl2 b ON a.key = b.key join src c on c.value = a.value;
 
+-- d = TS[0]-RS[7]-JOIN[8]-SEL[9]-FS[10]
+-- b = TS[1]-OP[13]-MAPJOIN[11]-RS[6]-JOIN[8]
+-- a = TS[2]-MAPJOIN[11]
+explain select count(*) FROM tbl1 a JOIN tbl2 b ON a.key = b.key join src d on d.value = a.value;
+select count(*) FROM tbl1 a JOIN tbl2 b ON a.key = b.key join src d on d.value = a.value;
+
+-- b = TS[0]-OP[13]-MAPJOIN[11]-RS[6]-JOIN[8]-SEL[9]-FS[10]
+-- a = TS[1]-MAPJOIN[11]
+-- h = TS[2]-RS[7]-JOIN[8]
+explain select count(*) FROM tbl1 a JOIN tbl2 b ON a.key = b.key join src h on h.value = a.value;
+select count(*) FROM tbl1 a JOIN tbl2 b ON a.key = b.key join src h on h.value = a.value;
+
 -- A SMB join is being followed by a regular join on a non-bucketed table on the same key
 explain select count(*) FROM tbl1 a JOIN tbl2 b ON a.key = b.key join src c on c.key = a.key;
 select count(*) FROM tbl1 a JOIN tbl2 b ON a.key = b.key join src c on c.key = a.key;

Modified: hive/branches/tez/ql/src/test/queries/clientpositive/groupby_complex_types.q
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/queries/clientpositive/groupby_complex_types.q?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/queries/clientpositive/groupby_complex_types.q (original)
+++ hive/branches/tez/ql/src/test/queries/clientpositive/groupby_complex_types.q Mon Jul 29 21:08:03 2013
@@ -1,16 +1,19 @@
-
 CREATE TABLE DEST1(key ARRAY<STRING>, value BIGINT) STORED AS TEXTFILE;
 CREATE TABLE DEST2(key MAP<STRING, STRING>, value BIGINT) STORED AS TEXTFILE;
+CREATE TABLE DEST3(key STRUCT<col1:STRING, col2:STRING>, value BIGINT) STORED AS TEXTFILE;
 
 EXPLAIN
 FROM SRC
 INSERT OVERWRITE TABLE DEST1 SELECT ARRAY(SRC.key), COUNT(1) GROUP BY ARRAY(SRC.key)
-INSERT OVERWRITE TABLE DEST2 SELECT MAP(SRC.key, SRC.value), COUNT(1) GROUP BY MAP(SRC.key, SRC.value);
+INSERT OVERWRITE TABLE DEST2 SELECT MAP(SRC.key, SRC.value), COUNT(1) GROUP BY MAP(SRC.key, SRC.value)
+INSERT OVERWRITE TABLE DEST3 SELECT STRUCT(SRC.key, SRC.value), COUNT(1) GROUP BY STRUCT(SRC.key, SRC.value);
 
 FROM SRC
 INSERT OVERWRITE TABLE DEST1 SELECT ARRAY(SRC.key), COUNT(1) GROUP BY ARRAY(SRC.key)
-INSERT OVERWRITE TABLE DEST2 SELECT MAP(SRC.key, SRC.value), COUNT(1) GROUP BY MAP(SRC.key, SRC.value);
+INSERT OVERWRITE TABLE DEST2 SELECT MAP(SRC.key, SRC.value), COUNT(1) GROUP BY MAP(SRC.key, SRC.value)
+INSERT OVERWRITE TABLE DEST3 SELECT STRUCT(SRC.key, SRC.value), COUNT(1) GROUP BY STRUCT(SRC.key, SRC.value);
 
 SELECT DEST1.* FROM DEST1;
 SELECT DEST2.* FROM DEST2;
+SELECT DEST3.* FROM DEST3;
 

Modified: hive/branches/tez/ql/src/test/queries/clientpositive/load_hdfs_file_with_space_in_the_name.q
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/queries/clientpositive/load_hdfs_file_with_space_in_the_name.q?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/queries/clientpositive/load_hdfs_file_with_space_in_the_name.q (original)
+++ hive/branches/tez/ql/src/test/queries/clientpositive/load_hdfs_file_with_space_in_the_name.q Mon Jul 29 21:08:03 2013
@@ -1,9 +1,9 @@
-dfs -mkdir hdfs:///tmp/test/load_file_with_space_in_the_name;
+dfs -mkdir hdfs:///tmp/test/;
 
-dfs -copyFromLocal ../data/files hdfs:///tmp/test/load_file_with_space_in_the_name;
+dfs -copyFromLocal ../data/files hdfs:///tmp/test/.;
 
 CREATE TABLE load_file_with_space_in_the_name(name STRING, age INT);
-LOAD DATA INPATH 'hdfs:///tmp/test/load_file_with_space_in_the_name/files/person age.txt' INTO TABLE load_file_with_space_in_the_name;
+LOAD DATA INPATH 'hdfs:///tmp/test/files/person age.txt' INTO TABLE load_file_with_space_in_the_name;
 
 dfs -rmr hdfs:///tmp/test;
 

Modified: hive/branches/tez/ql/src/test/queries/clientpositive/pcr.q
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/queries/clientpositive/pcr.q?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/queries/clientpositive/pcr.q (original)
+++ hive/branches/tez/ql/src/test/queries/clientpositive/pcr.q Mon Jul 29 21:08:03 2013
@@ -102,3 +102,40 @@ select key, value, ds, hr from srcpart w
 drop table pcr_t1;
 drop table pcr_t2;
 drop table pcr_t3;
+
+
+-- Test cases where a non-boolean ds expression has the same value for all possible ds values, and where the values differ:
+drop table pcr_foo;
+create table pcr_foo (key int, value string) partitioned by (ds int);
+insert overwrite table pcr_foo partition (ds=3) select * from src where key < 10 order by key;
+insert overwrite table pcr_foo partition (ds=5) select * from src where key < 10 order by key;
+insert overwrite table pcr_foo partition (ds=7) select * from src where key < 10 order by key;
+
+-- the condition is 'true' for all 3 partitions (ds=3,5,7):
+select key, value, ds from pcr_foo where (ds % 2 == 1);
+
+-- the condition is 'true' for partitions (ds=3,5) but 'false' for partition ds=7:
+select key, value, ds from pcr_foo where (ds / 3 < 2);
+
+drop table pcr_foo;
+
+
+
+-- Cover org.apache.hadoop.hive.ql.optimizer.pcr.PcrExprProcFactory.FieldExprProcessor.
+-- Create a table with struct data:
+create table ab(strct struct<a:int, b:string>)
+row format delimited
+  fields terminated by '\t'
+  collection items terminated by '\001';
+load data local inpath '../data/files/kv1.txt'
+overwrite into table ab;
+
+-- Create partitioned table with struct data:
+drop table foo_field;
+create table foo_field (s struct<a:int,b:string>) partitioned by (ds int);
+insert overwrite table foo_field partition (ds=5) select strct from ab where strct.a < 10 limit 2;
+insert overwrite table foo_field partition (ds=7) select strct from ab where strct.a > 190 limit 2;
+select s,ds from foo_field where ((ds + s.a) > 0) order by ds,s;
+
+drop table foo_field;
+

Modified: hive/branches/tez/ql/src/test/queries/clientpositive/ptf_npath.q
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/queries/clientpositive/ptf_npath.q?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/queries/clientpositive/ptf_npath.q (original)
+++ hive/branches/tez/ql/src/test/queries/clientpositive/ptf_npath.q Mon Jul 29 21:08:03 2013
@@ -27,7 +27,7 @@ from npath(on 
 select origin_city_name, fl_num, year, month, day_of_month, sz, tpath 
 from npath(on 
         flights_tiny 
-        sort by year, month, day_of_month  
+        sort by fl_num, year, month, day_of_month  
       arg1('LATE.LATE+'), 
       arg2('LATE'), arg3(arr_delay > 15), 
     arg4('origin_city_name, fl_num, year, month, day_of_month, size(tpath) as sz, tpath[0].day_of_month as tpath') 

Modified: hive/branches/tez/ql/src/test/queries/clientpositive/serde_reported_schema.q
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/queries/clientpositive/serde_reported_schema.q?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/queries/clientpositive/serde_reported_schema.q (original)
+++ hive/branches/tez/ql/src/test/queries/clientpositive/serde_reported_schema.q Mon Jul 29 21:08:03 2013
@@ -4,6 +4,6 @@ create table int_string
     with serdeproperties (
       "serialization.class"="org.apache.hadoop.hive.serde2.thrift.test.IntString",
       "serialization.format"="org.apache.thrift.protocol.TBinaryProtocol");
-describe int_string;
+describe extended int_string;
 alter table int_string add partition (b='part1');
-describe int_string partition (b='part1');
\ No newline at end of file
+describe extended int_string partition (b='part1');

Modified: hive/branches/tez/ql/src/test/queries/clientpositive/udf_inline.q
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/queries/clientpositive/udf_inline.q?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/queries/clientpositive/udf_inline.q (original)
+++ hive/branches/tez/ql/src/test/queries/clientpositive/udf_inline.q Mon Jul 29 21:08:03 2013
@@ -16,3 +16,11 @@ SELECT inline( 
   )
 )  as (id, text) FROM SRC limit 2;
 
+-- HIVE-3475 INLINE UDTF doesn't convert types properly
+select * from (SELECT
+  ARRAY(
+    STRUCT (1,'dude!'),
+    STRUCT (2,'Wheres'),
+    STRUCT (3,'my car?')
+  ) as value FROM SRC limit 1) input
+ LATERAL VIEW inline(value) myTable AS id, text;

Modified: hive/branches/tez/ql/src/test/resources/orc-file-dump.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/resources/orc-file-dump.out?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/resources/orc-file-dump.out (original)
+++ hive/branches/tez/ql/src/test/resources/orc-file-dump.out Mon Jul 29 21:08:03 2013
@@ -11,87 +11,72 @@ Statistics:
   Column 3: count: 21000 min: Darkness, max: worst
 
 Stripes:
-  Stripe: offset: 3 data: 69638 rows: 5000 tail: 85 index: 126
+  Stripe: offset: 3 data: 69605 rows: 5000 tail: 72 index: 119
     Stream: column 0 section ROW_INDEX start: 3 length 10
-    Stream: column 1 section ROW_INDEX start: 13 length 38
-    Stream: column 2 section ROW_INDEX start: 51 length 42
-    Stream: column 3 section ROW_INDEX start: 93 length 36
-    Stream: column 1 section PRESENT start: 129 length 11
-    Stream: column 1 section DATA start: 140 length 22605
-    Stream: column 2 section PRESENT start: 22745 length 11
-    Stream: column 2 section DATA start: 22756 length 43426
-    Stream: column 3 section PRESENT start: 66182 length 11
-    Stream: column 3 section DATA start: 66193 length 3403
-    Stream: column 3 section LENGTH start: 69596 length 38
-    Stream: column 3 section DICTIONARY_DATA start: 69634 length 133
+    Stream: column 1 section ROW_INDEX start: 13 length 35
+    Stream: column 2 section ROW_INDEX start: 48 length 39
+    Stream: column 3 section ROW_INDEX start: 87 length 35
+    Stream: column 1 section DATA start: 122 length 22605
+    Stream: column 2 section DATA start: 22727 length 43426
+    Stream: column 3 section DATA start: 66153 length 3403
+    Stream: column 3 section LENGTH start: 69556 length 38
+    Stream: column 3 section DICTIONARY_DATA start: 69594 length 133
     Encoding column 0: DIRECT
     Encoding column 1: DIRECT
     Encoding column 2: DIRECT
     Encoding column 3: DICTIONARY[35]
-  Stripe: offset: 69852 data: 69617 rows: 5000 tail: 83 index: 124
-    Stream: column 0 section ROW_INDEX start: 69852 length 10
-    Stream: column 1 section ROW_INDEX start: 69862 length 36
-    Stream: column 2 section ROW_INDEX start: 69898 length 42
-    Stream: column 3 section ROW_INDEX start: 69940 length 36
-    Stream: column 1 section PRESENT start: 69976 length 11
-    Stream: column 1 section DATA start: 69987 length 22597
-    Stream: column 2 section PRESENT start: 92584 length 11
-    Stream: column 2 section DATA start: 92595 length 43439
-    Stream: column 3 section PRESENT start: 136034 length 11
-    Stream: column 3 section DATA start: 136045 length 3377
-    Stream: column 3 section LENGTH start: 139422 length 38
-    Stream: column 3 section DICTIONARY_DATA start: 139460 length 133
+  Stripe: offset: 69799 data: 69584 rows: 5000 tail: 73 index: 118
+    Stream: column 0 section ROW_INDEX start: 69799 length 10
+    Stream: column 1 section ROW_INDEX start: 69809 length 34
+    Stream: column 2 section ROW_INDEX start: 69843 length 39
+    Stream: column 3 section ROW_INDEX start: 69882 length 35
+    Stream: column 1 section DATA start: 69917 length 22597
+    Stream: column 2 section DATA start: 92514 length 43439
+    Stream: column 3 section DATA start: 135953 length 3377
+    Stream: column 3 section LENGTH start: 139330 length 38
+    Stream: column 3 section DICTIONARY_DATA start: 139368 length 133
     Encoding column 0: DIRECT
     Encoding column 1: DIRECT
     Encoding column 2: DIRECT
     Encoding column 3: DICTIONARY[35]
-  Stripe: offset: 139676 data: 69603 rows: 5000 tail: 85 index: 127
-    Stream: column 0 section ROW_INDEX start: 139676 length 10
-    Stream: column 1 section ROW_INDEX start: 139686 length 39
-    Stream: column 2 section ROW_INDEX start: 139725 length 42
-    Stream: column 3 section ROW_INDEX start: 139767 length 36
-    Stream: column 1 section PRESENT start: 139803 length 11
-    Stream: column 1 section DATA start: 139814 length 22594
-    Stream: column 2 section PRESENT start: 162408 length 11
-    Stream: column 2 section DATA start: 162419 length 43415
-    Stream: column 3 section PRESENT start: 205834 length 11
-    Stream: column 3 section DATA start: 205845 length 3390
-    Stream: column 3 section LENGTH start: 209235 length 38
-    Stream: column 3 section DICTIONARY_DATA start: 209273 length 133
+  Stripe: offset: 139574 data: 69570 rows: 5000 tail: 73 index: 120
+    Stream: column 0 section ROW_INDEX start: 139574 length 10
+    Stream: column 1 section ROW_INDEX start: 139584 length 36
+    Stream: column 2 section ROW_INDEX start: 139620 length 39
+    Stream: column 3 section ROW_INDEX start: 139659 length 35
+    Stream: column 1 section DATA start: 139694 length 22594
+    Stream: column 2 section DATA start: 162288 length 43415
+    Stream: column 3 section DATA start: 205703 length 3390
+    Stream: column 3 section LENGTH start: 209093 length 38
+    Stream: column 3 section DICTIONARY_DATA start: 209131 length 133
     Encoding column 0: DIRECT
     Encoding column 1: DIRECT
     Encoding column 2: DIRECT
     Encoding column 3: DICTIONARY[35]
-  Stripe: offset: 209491 data: 69584 rows: 5000 tail: 84 index: 126
-    Stream: column 0 section ROW_INDEX start: 209491 length 10
-    Stream: column 1 section ROW_INDEX start: 209501 length 38
-    Stream: column 2 section ROW_INDEX start: 209539 length 42
-    Stream: column 3 section ROW_INDEX start: 209581 length 36
-    Stream: column 1 section PRESENT start: 209617 length 11
-    Stream: column 1 section DATA start: 209628 length 22575
-    Stream: column 2 section PRESENT start: 232203 length 11
-    Stream: column 2 section DATA start: 232214 length 43426
-    Stream: column 3 section PRESENT start: 275640 length 11
-    Stream: column 3 section DATA start: 275651 length 3379
-    Stream: column 3 section LENGTH start: 279030 length 38
-    Stream: column 3 section DICTIONARY_DATA start: 279068 length 133
+  Stripe: offset: 209337 data: 69551 rows: 5000 tail: 72 index: 119
+    Stream: column 0 section ROW_INDEX start: 209337 length 10
+    Stream: column 1 section ROW_INDEX start: 209347 length 35
+    Stream: column 2 section ROW_INDEX start: 209382 length 39
+    Stream: column 3 section ROW_INDEX start: 209421 length 35
+    Stream: column 1 section DATA start: 209456 length 22575
+    Stream: column 2 section DATA start: 232031 length 43426
+    Stream: column 3 section DATA start: 275457 length 3379
+    Stream: column 3 section LENGTH start: 278836 length 38
+    Stream: column 3 section DICTIONARY_DATA start: 278874 length 133
     Encoding column 0: DIRECT
     Encoding column 1: DIRECT
     Encoding column 2: DIRECT
     Encoding column 3: DICTIONARY[35]
-  Stripe: offset: 279285 data: 14111 rows: 1000 tail: 80 index: 127
-    Stream: column 0 section ROW_INDEX start: 279285 length 10
-    Stream: column 1 section ROW_INDEX start: 279295 length 39
-    Stream: column 2 section ROW_INDEX start: 279334 length 42
-    Stream: column 3 section ROW_INDEX start: 279376 length 36
-    Stream: column 1 section PRESENT start: 279412 length 5
-    Stream: column 1 section DATA start: 279417 length 4529
-    Stream: column 2 section PRESENT start: 283946 length 5
-    Stream: column 2 section DATA start: 283951 length 8690
-    Stream: column 3 section PRESENT start: 292641 length 5
-    Stream: column 3 section DATA start: 292646 length 706
-    Stream: column 3 section LENGTH start: 293352 length 38
-    Stream: column 3 section DICTIONARY_DATA start: 293390 length 133
+  Stripe: offset: 279079 data: 14096 rows: 1000 tail: 68 index: 120
+    Stream: column 0 section ROW_INDEX start: 279079 length 10
+    Stream: column 1 section ROW_INDEX start: 279089 length 36
+    Stream: column 2 section ROW_INDEX start: 279125 length 39
+    Stream: column 3 section ROW_INDEX start: 279164 length 35
+    Stream: column 1 section DATA start: 279199 length 4529
+    Stream: column 2 section DATA start: 283728 length 8690
+    Stream: column 3 section DATA start: 292418 length 706
+    Stream: column 3 section LENGTH start: 293124 length 38
+    Stream: column 3 section DICTIONARY_DATA start: 293162 length 133
     Encoding column 0: DIRECT
     Encoding column 1: DIRECT
     Encoding column 2: DIRECT

Modified: hive/branches/tez/ql/src/test/results/beelinepositive/auto_join0.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/beelinepositive/auto_join0.q.out?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/beelinepositive/auto_join0.q.out (original)
+++ hive/branches/tez/ql/src/test/results/beelinepositive/auto_join0.q.out Mon Jul 29 21:08:03 2013
@@ -45,7 +45,7 @@ SORT BY k1, v1, k2, v2 
 '            alias: src'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (key < 10.0)'
+'                  expr: (key < 10)'
 '                  type: boolean'
 '              Select Operator'
 '                expressions:'
@@ -72,7 +72,7 @@ SORT BY k1, v1, k2, v2 
 '            alias: src'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (key < 10.0)'
+'                  expr: (key < 10)'
 '                  type: boolean'
 '              Select Operator'
 '                expressions:'
@@ -205,7 +205,7 @@ SORT BY k1, v1, k2, v2 
 '            alias: src'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (key < 10.0)'
+'                  expr: (key < 10)'
 '                  type: boolean'
 '              Select Operator'
 '                expressions:'
@@ -232,7 +232,7 @@ SORT BY k1, v1, k2, v2 
 '            alias: src'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (key < 10.0)'
+'                  expr: (key < 10)'
 '                  type: boolean'
 '              Select Operator'
 '                expressions:'
@@ -281,7 +281,7 @@ SORT BY k1, v1, k2, v2 
 '            alias: src'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (key < 10.0)'
+'                  expr: (key < 10)'
 '                  type: boolean'
 '              Select Operator'
 '                expressions:'
@@ -303,7 +303,7 @@ SORT BY k1, v1, k2, v2 
 '            alias: src'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (key < 10.0)'
+'                  expr: (key < 10)'
 '                  type: boolean'
 '              Select Operator'
 '                expressions:'

Modified: hive/branches/tez/ql/src/test/results/beelinepositive/auto_join12.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/beelinepositive/auto_join12.q.out?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/beelinepositive/auto_join12.q.out (original)
+++ hive/branches/tez/ql/src/test/results/beelinepositive/auto_join12.q.out Mon Jul 29 21:08:03 2013
@@ -51,7 +51,7 @@ ON src1.c1 = src3.c5 AND src3.c5 < 80;
 '            alias: src'
 '            Filter Operator'
 '              predicate:'
-'                  expr: ((key < 100.0) and (key < 80.0))'
+'                  expr: ((key < 100) and (key < 80))'
 '                  type: boolean'
 '              Select Operator'
 '                expressions:'
@@ -76,7 +76,7 @@ ON src1.c1 = src3.c5 AND src3.c5 < 80;
 '            alias: src'
 '            Filter Operator'
 '              predicate:'
-'                  expr: ((key < 80.0) and (key < 100.0))'
+'                  expr: ((key < 80) and (key < 100))'
 '                  type: boolean'
 '              Select Operator'
 '                expressions:'
@@ -103,7 +103,7 @@ ON src1.c1 = src3.c5 AND src3.c5 < 80;
 '            alias: src'
 '            Filter Operator'
 '              predicate:'
-'                  expr: ((key < 100.0) and (key < 80.0))'
+'                  expr: ((key < 100) and (key < 80))'
 '                  type: boolean'
 '              Select Operator'
 '                expressions:'
@@ -191,7 +191,7 @@ ON src1.c1 = src3.c5 AND src3.c5 < 80;
 '            alias: src'
 '            Filter Operator'
 '              predicate:'
-'                  expr: ((key < 100.0) and (key < 80.0))'
+'                  expr: ((key < 100) and (key < 80))'
 '                  type: boolean'
 '              Select Operator'
 '                expressions:'
@@ -214,7 +214,7 @@ ON src1.c1 = src3.c5 AND src3.c5 < 80;
 '            alias: src'
 '            Filter Operator'
 '              predicate:'
-'                  expr: ((key < 80.0) and (key < 100.0))'
+'                  expr: ((key < 80) and (key < 100))'
 '                  type: boolean'
 '              Select Operator'
 '                expressions:'
@@ -241,7 +241,7 @@ ON src1.c1 = src3.c5 AND src3.c5 < 80;
 '            alias: src'
 '            Filter Operator'
 '              predicate:'
-'                  expr: ((key < 100.0) and (key < 80.0))'
+'                  expr: ((key < 100) and (key < 80))'
 '                  type: boolean'
 '              Select Operator'
 '                expressions:'
@@ -302,7 +302,7 @@ ON src1.c1 = src3.c5 AND src3.c5 < 80;
 '            alias: src'
 '            Filter Operator'
 '              predicate:'
-'                  expr: ((key < 100.0) and (key < 80.0))'
+'                  expr: ((key < 100) and (key < 80))'
 '                  type: boolean'
 '              Select Operator'
 '                expressions:'
@@ -325,7 +325,7 @@ ON src1.c1 = src3.c5 AND src3.c5 < 80;
 '            alias: src'
 '            Filter Operator'
 '              predicate:'
-'                  expr: ((key < 100.0) and (key < 80.0))'
+'                  expr: ((key < 100) and (key < 80))'
 '                  type: boolean'
 '              Select Operator'
 '                expressions:'
@@ -354,7 +354,7 @@ ON src1.c1 = src3.c5 AND src3.c5 < 80;
 '            alias: src'
 '            Filter Operator'
 '              predicate:'
-'                  expr: ((key < 80.0) and (key < 100.0))'
+'                  expr: ((key < 80) and (key < 100))'
 '                  type: boolean'
 '              Select Operator'
 '                expressions:'
@@ -406,7 +406,7 @@ ON src1.c1 = src3.c5 AND src3.c5 < 80;
 '            alias: src'
 '            Filter Operator'
 '              predicate:'
-'                  expr: ((key < 100.0) and (key < 80.0))'
+'                  expr: ((key < 100) and (key < 80))'
 '                  type: boolean'
 '              Select Operator'
 '                expressions:'
@@ -430,7 +430,7 @@ ON src1.c1 = src3.c5 AND src3.c5 < 80;
 '            alias: src'
 '            Filter Operator'
 '              predicate:'
-'                  expr: ((key < 100.0) and (key < 80.0))'
+'                  expr: ((key < 100) and (key < 80))'
 '                  type: boolean'
 '              Select Operator'
 '                expressions:'
@@ -456,7 +456,7 @@ ON src1.c1 = src3.c5 AND src3.c5 < 80;
 '            alias: src'
 '            Filter Operator'
 '              predicate:'
-'                  expr: ((key < 80.0) and (key < 100.0))'
+'                  expr: ((key < 80) and (key < 100))'
 '                  type: boolean'
 '              Select Operator'
 '                expressions:'

Modified: hive/branches/tez/ql/src/test/results/beelinepositive/auto_join13.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/beelinepositive/auto_join13.q.out?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/beelinepositive/auto_join13.q.out (original)
+++ hive/branches/tez/ql/src/test/results/beelinepositive/auto_join13.q.out Mon Jul 29 21:08:03 2013
@@ -50,7 +50,7 @@ ON src1.c1 + src2.c3 = src3.c5 AND src3.
 '            alias: src'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (key < 100.0)'
+'                  expr: (key < 100)'
 '                  type: boolean'
 '              Select Operator'
 '                expressions:'
@@ -77,7 +77,7 @@ ON src1.c1 + src2.c3 = src3.c5 AND src3.
 '            alias: src'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (key < 100.0)'
+'                  expr: (key < 100)'
 '                  type: boolean'
 '              Select Operator'
 '                expressions:'
@@ -120,7 +120,7 @@ ON src1.c1 + src2.c3 = src3.c5 AND src3.
 '            alias: src'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (key < 200.0)'
+'                  expr: (key < 200)'
 '                  type: boolean'
 '              Select Operator'
 '                expressions:'
@@ -230,7 +230,7 @@ ON src1.c1 + src2.c3 = src3.c5 AND src3.
 '            alias: src'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (key < 200.0)'
+'                  expr: (key < 200)'
 '                  type: boolean'
 '              Select Operator'
 '                expressions:'
@@ -294,7 +294,7 @@ ON src1.c1 + src2.c3 = src3.c5 AND src3.
 '            alias: src'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (key < 200.0)'
+'                  expr: (key < 200)'
 '                  type: boolean'
 '              Select Operator'
 '                expressions:'
@@ -351,7 +351,7 @@ ON src1.c1 + src2.c3 = src3.c5 AND src3.
 '            alias: src'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (key < 100.0)'
+'                  expr: (key < 100)'
 '                  type: boolean'
 '              Select Operator'
 '                expressions:'
@@ -376,7 +376,7 @@ ON src1.c1 + src2.c3 = src3.c5 AND src3.
 '            alias: src'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (key < 100.0)'
+'                  expr: (key < 100)'
 '                  type: boolean'
 '              Select Operator'
 '                expressions:'
@@ -414,7 +414,7 @@ ON src1.c1 + src2.c3 = src3.c5 AND src3.
 '            alias: src'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (key < 100.0)'
+'                  expr: (key < 100)'
 '                  type: boolean'
 '              Select Operator'
 '                expressions:'
@@ -438,7 +438,7 @@ ON src1.c1 + src2.c3 = src3.c5 AND src3.
 '            alias: src'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (key < 100.0)'
+'                  expr: (key < 100)'
 '                  type: boolean'
 '              Select Operator'
 '                expressions:'

Modified: hive/branches/tez/ql/src/test/results/beelinepositive/auto_join16.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/beelinepositive/auto_join16.q.out?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/beelinepositive/auto_join16.q.out (original)
+++ hive/branches/tez/ql/src/test/results/beelinepositive/auto_join16.q.out Mon Jul 29 21:08:03 2013
@@ -42,7 +42,7 @@ where tab.value < 200;
 '            alias: tab'
 '            Filter Operator'
 '              predicate:'
-'                  expr: ((key > 20.0) and (value < 200.0))'
+'                  expr: ((key > 20) and (value < 200))'
 '                  type: boolean'
 '              HashTable Sink Operator'
 '                condition expressions:'
@@ -62,7 +62,7 @@ where tab.value < 200;
 '            alias: a'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (((key > 10.0) and (key > 20.0)) and (value < 200.0))'
+'                  expr: (((key > 10) and (key > 20)) and (value < 200))'
 '                  type: boolean'
 '              Select Operator'
 '                expressions:'
@@ -146,7 +146,7 @@ where tab.value < 200;
 '            alias: a'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (((key > 10.0) and (key > 20.0)) and (value < 200.0))'
+'                  expr: (((key > 10) and (key > 20)) and (value < 200))'
 '                  type: boolean'
 '              Select Operator'
 '                expressions:'
@@ -173,7 +173,7 @@ where tab.value < 200;
 '            alias: tab'
 '            Filter Operator'
 '              predicate:'
-'                  expr: ((key > 20.0) and (value < 200.0))'
+'                  expr: ((key > 20) and (value < 200))'
 '                  type: boolean'
 '              Map Join Operator'
 '                condition map:'
@@ -217,7 +217,7 @@ where tab.value < 200;
 '            alias: a'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (((key > 10.0) and (key > 20.0)) and (value < 200.0))'
+'                  expr: (((key > 10) and (key > 20)) and (value < 200))'
 '                  type: boolean'
 '              Select Operator'
 '                expressions:'
@@ -247,7 +247,7 @@ where tab.value < 200;
 '            alias: tab'
 '            Filter Operator'
 '              predicate:'
-'                  expr: ((key > 20.0) and (value < 200.0))'
+'                  expr: ((key > 20) and (value < 200))'
 '                  type: boolean'
 '              Reduce Output Operator'
 '                key expressions:'

Modified: hive/branches/tez/ql/src/test/results/beelinepositive/auto_join20.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/beelinepositive/auto_join20.q.out?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/beelinepositive/auto_join20.q.out (original)
+++ hive/branches/tez/ql/src/test/results/beelinepositive/auto_join20.q.out Mon Jul 29 21:08:03 2013
@@ -42,7 +42,7 @@ SORT BY k1,v1,k2,v2,k3,v3 
 '            alias: src1'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (key < 10.0)'
+'                  expr: (key < 10)'
 '                  type: boolean'
 '              HashTable Sink Operator'
 '                condition expressions:'
@@ -52,7 +52,7 @@ SORT BY k1,v1,k2,v2,k3,v3 
 '                filter predicates:'
 '                  0 '
 '                  1 '
-'                  2 {(key < 20.0)}'
+'                  2 {(key < 20)}'
 '                handleSkewJoin: false'
 '                keys:'
 '                  0 [Column[key]]'
@@ -64,7 +64,7 @@ SORT BY k1,v1,k2,v2,k3,v3 
 '            alias: src2'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (key < 10.0)'
+'                  expr: (key < 10)'
 '                  type: boolean'
 '              HashTable Sink Operator'
 '                condition expressions:'
@@ -74,7 +74,7 @@ SORT BY k1,v1,k2,v2,k3,v3 
 '                filter predicates:'
 '                  0 '
 '                  1 '
-'                  2 {(key < 20.0)}'
+'                  2 {(key < 20)}'
 '                handleSkewJoin: false'
 '                keys:'
 '                  0 [Column[key]]'
@@ -99,7 +99,7 @@ SORT BY k1,v1,k2,v2,k3,v3 
 '              filter predicates:'
 '                0 '
 '                1 '
-'                2 {(key < 20.0)}'
+'                2 {(key < 20)}'
 '              handleSkewJoin: false'
 '              keys:'
 '                0 [Column[key]]'
@@ -231,7 +231,7 @@ SORT BY k1,v1,k2,v2,k3,v3 
 '            alias: src1'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (key < 10.0)'
+'                  expr: (key < 10)'
 '                  type: boolean'
 '              Reduce Output Operator'
 '                key expressions:'
@@ -252,7 +252,7 @@ SORT BY k1,v1,k2,v2,k3,v3 
 '            alias: src2'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (key < 10.0)'
+'                  expr: (key < 10)'
 '                  type: boolean'
 '              Reduce Output Operator'
 '                key expressions:'
@@ -297,7 +297,7 @@ SORT BY k1,v1,k2,v2,k3,v3 
 '          filter predicates:'
 '            0 '
 '            1 '
-'            2 {(VALUE._col0 < 20.0)}'
+'            2 {(VALUE._col0 < 20)}'
 '          handleSkewJoin: false'
 '          outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9'
 '          Select Operator'
@@ -378,7 +378,7 @@ SORT BY k1,v1,k2,v2,k3,v3 
 '            alias: src1'
 '            Filter Operator'
 '              predicate:'
-'                  expr: ((key < 10.0) and (key < 15.0))'
+'                  expr: ((key < 10) and (key < 15))'
 '                  type: boolean'
 '              HashTable Sink Operator'
 '                condition expressions:'
@@ -388,7 +388,7 @@ SORT BY k1,v1,k2,v2,k3,v3 
 '                filter predicates:'
 '                  0 '
 '                  1 '
-'                  2 {(key < 20.0)}'
+'                  2 {(key < 20)}'
 '                handleSkewJoin: false'
 '                keys:'
 '                  0 [Column[key]]'
@@ -400,7 +400,7 @@ SORT BY k1,v1,k2,v2,k3,v3 
 '            alias: src2'
 '            Filter Operator'
 '              predicate:'
-'                  expr: ((key < 15.0) and (key < 10.0))'
+'                  expr: ((key < 15) and (key < 10))'
 '                  type: boolean'
 '              HashTable Sink Operator'
 '                condition expressions:'
@@ -410,7 +410,7 @@ SORT BY k1,v1,k2,v2,k3,v3 
 '                filter predicates:'
 '                  0 '
 '                  1 '
-'                  2 {(key < 20.0)}'
+'                  2 {(key < 20)}'
 '                handleSkewJoin: false'
 '                keys:'
 '                  0 [Column[key]]'
@@ -435,7 +435,7 @@ SORT BY k1,v1,k2,v2,k3,v3 
 '              filter predicates:'
 '                0 '
 '                1 '
-'                2 {(key < 20.0)}'
+'                2 {(key < 20)}'
 '              handleSkewJoin: false'
 '              keys:'
 '                0 [Column[key]]'
@@ -567,7 +567,7 @@ SORT BY k1,v1,k2,v2,k3,v3 
 '            alias: src1'
 '            Filter Operator'
 '              predicate:'
-'                  expr: ((key < 10.0) and (key < 15.0))'
+'                  expr: ((key < 10) and (key < 15))'
 '                  type: boolean'
 '              Reduce Output Operator'
 '                key expressions:'
@@ -588,7 +588,7 @@ SORT BY k1,v1,k2,v2,k3,v3 
 '            alias: src2'
 '            Filter Operator'
 '              predicate:'
-'                  expr: ((key < 15.0) and (key < 10.0))'
+'                  expr: ((key < 15) and (key < 10))'
 '                  type: boolean'
 '              Reduce Output Operator'
 '                key expressions:'
@@ -633,7 +633,7 @@ SORT BY k1,v1,k2,v2,k3,v3 
 '          filter predicates:'
 '            0 '
 '            1 '
-'            2 {(VALUE._col0 < 20.0)}'
+'            2 {(VALUE._col0 < 20)}'
 '          handleSkewJoin: false'
 '          outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9'
 '          Select Operator'

Modified: hive/branches/tez/ql/src/test/results/beelinepositive/auto_join21.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/beelinepositive/auto_join21.q.out?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/beelinepositive/auto_join21.q.out (original)
+++ hive/branches/tez/ql/src/test/results/beelinepositive/auto_join21.q.out Mon Jul 29 21:08:03 2013
@@ -39,9 +39,9 @@ SELECT * FROM src src1 LEFT OUTER JOIN s
 '                1 {key} {value}'
 '                2 {key} {value}'
 '              filter predicates:'
-'                0 {(key < 10.0)}'
+'                0 {(key < 10)}'
 '                1 '
-'                2 {(key < 10.0)}'
+'                2 {(key < 10)}'
 '              handleSkewJoin: false'
 '              keys:'
 '                0 [Column[key]]'
@@ -53,7 +53,7 @@ SELECT * FROM src src1 LEFT OUTER JOIN s
 '            alias: src2'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (key > 10.0)'
+'                  expr: (key > 10)'
 '                  type: boolean'
 '              HashTable Sink Operator'
 '                condition expressions:'
@@ -61,9 +61,9 @@ SELECT * FROM src src1 LEFT OUTER JOIN s
 '                  1 {key} {value}'
 '                  2 {key} {value}'
 '                filter predicates:'
-'                  0 {(key < 10.0)}'
+'                  0 {(key < 10)}'
 '                  1 '
-'                  2 {(key < 10.0)}'
+'                  2 {(key < 10)}'
 '                handleSkewJoin: false'
 '                keys:'
 '                  0 [Column[key]]'
@@ -86,9 +86,9 @@ SELECT * FROM src src1 LEFT OUTER JOIN s
 '                1 {key} {value}'
 '                2 {key} {value}'
 '              filter predicates:'
-'                0 {(key < 10.0)}'
+'                0 {(key < 10)}'
 '                1 '
-'                2 {(key < 10.0)}'
+'                2 {(key < 10)}'
 '              handleSkewJoin: false'
 '              keys:'
 '                0 [Column[key]]'
@@ -187,7 +187,7 @@ SELECT * FROM src src1 LEFT OUTER JOIN s
 '            alias: src2'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (key > 10.0)'
+'                  expr: (key > 10)'
 '                  type: boolean'
 '              Reduce Output Operator'
 '                key expressions:'
@@ -230,9 +230,9 @@ SELECT * FROM src src1 LEFT OUTER JOIN s
 '            1 {VALUE._col0} {VALUE._col1}'
 '            2 {VALUE._col0} {VALUE._col1}'
 '          filter predicates:'
-'            0 {(VALUE._col0 < 10.0)}'
+'            0 {(VALUE._col0 < 10)}'
 '            1 '
-'            2 {(VALUE._col0 < 10.0)}'
+'            2 {(VALUE._col0 < 10)}'
 '          handleSkewJoin: false'
 '          outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9'
 '          Select Operator'

Modified: hive/branches/tez/ql/src/test/results/beelinepositive/auto_join23.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/beelinepositive/auto_join23.q.out?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/beelinepositive/auto_join23.q.out (original)
+++ hive/branches/tez/ql/src/test/results/beelinepositive/auto_join23.q.out Mon Jul 29 21:08:03 2013
@@ -35,7 +35,7 @@ SELECT  *  FROM src src1 JOIN src src2 W
 '            alias: src2'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (key < 10.0)'
+'                  expr: (key < 10)'
 '                  type: boolean'
 '              HashTable Sink Operator'
 '                condition expressions:'
@@ -55,7 +55,7 @@ SELECT  *  FROM src src1 JOIN src src2 W
 '            alias: src1'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (key < 10.0)'
+'                  expr: (key < 10)'
 '                  type: boolean'
 '              Map Join Operator'
 '                condition map:'
@@ -135,7 +135,7 @@ SELECT  *  FROM src src1 JOIN src src2 W
 '            alias: src1'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (key < 10.0)'
+'                  expr: (key < 10)'
 '                  type: boolean'
 '              HashTable Sink Operator'
 '                condition expressions:'
@@ -155,7 +155,7 @@ SELECT  *  FROM src src1 JOIN src src2 W
 '            alias: src2'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (key < 10.0)'
+'                  expr: (key < 10)'
 '                  type: boolean'
 '              Map Join Operator'
 '                condition map:'
@@ -197,7 +197,7 @@ SELECT  *  FROM src src1 JOIN src src2 W
 '            alias: src1'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (key < 10.0)'
+'                  expr: (key < 10)'
 '                  type: boolean'
 '              Reduce Output Operator'
 '                sort order: '
@@ -212,7 +212,7 @@ SELECT  *  FROM src src1 JOIN src src2 W
 '            alias: src2'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (key < 10.0)'
+'                  expr: (key < 10)'
 '                  type: boolean'
 '              Reduce Output Operator'
 '                sort order: '

Modified: hive/branches/tez/ql/src/test/results/beelinepositive/auto_join27.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/beelinepositive/auto_join27.q.out?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/beelinepositive/auto_join27.q.out (original)
+++ hive/branches/tez/ql/src/test/results/beelinepositive/auto_join27.q.out Mon Jul 29 21:08:03 2013
@@ -38,7 +38,7 @@ ON src_12.key = src3.k AND src3.k < 200;
 '            alias: src'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (key < 200.0)'
+'                  expr: (key < 200)'
 '                  type: boolean'
 '              Select Operator'
 '                expressions:'
@@ -108,7 +108,7 @@ ON src_12.key = src3.k AND src3.k < 200;
 '            alias: src'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (key < 200.0)'
+'                  expr: (key < 200)'
 '                  type: boolean'
 '              Select Operator'
 '                expressions:'
@@ -160,7 +160,7 @@ ON src_12.key = src3.k AND src3.k < 200;
 '            alias: src'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (key < 200.0)'
+'                  expr: (key < 200)'
 '                  type: boolean'
 '              Select Operator'
 '                expressions:'
@@ -253,7 +253,7 @@ ON src_12.key = src3.k AND src3.k < 200;
 '            alias: src'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (key < 200.0)'
+'                  expr: (key < 200)'
 '                  type: boolean'
 '              Select Operator'
 '                expressions:'
@@ -281,7 +281,7 @@ ON src_12.key = src3.k AND src3.k < 200;
 '            alias: src'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (key < 200.0)'
+'                  expr: (key < 200)'
 '                  type: boolean'
 '              Select Operator'
 '                expressions:'
@@ -335,7 +335,7 @@ ON src_12.key = src3.k AND src3.k < 200;
 '            alias: src'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (key < 200.0)'
+'                  expr: (key < 200)'
 '                  type: boolean'
 '              Select Operator'
 '                expressions:'
@@ -359,7 +359,7 @@ ON src_12.key = src3.k AND src3.k < 200;
 '            alias: src'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (key < 200.0)'
+'                  expr: (key < 200)'
 '                  type: boolean'
 '              Select Operator'
 '                expressions:'

Modified: hive/branches/tez/ql/src/test/results/beelinepositive/auto_join28.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/beelinepositive/auto_join28.q.out?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/beelinepositive/auto_join28.q.out (original)
+++ hive/branches/tez/ql/src/test/results/beelinepositive/auto_join28.q.out Mon Jul 29 21:08:03 2013
@@ -46,7 +46,7 @@ SELECT * FROM src src1 LEFT OUTER JOIN s
 '            alias: src2'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (key > 10.0)'
+'                  expr: (key > 10)'
 '                  type: boolean'
 '              Reduce Output Operator'
 '                key expressions:'
@@ -89,9 +89,9 @@ SELECT * FROM src src1 LEFT OUTER JOIN s
 '            1 {VALUE._col0} {VALUE._col1}'
 '            2 {VALUE._col0} {VALUE._col1}'
 '          filter predicates:'
-'            0 {(VALUE._col0 < 10.0)}'
+'            0 {(VALUE._col0 < 10)}'
 '            1 '
-'            2 {(VALUE._col0 < 10.0)}'
+'            2 {(VALUE._col0 < 10)}'
 '          handleSkewJoin: false'
 '          outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9'
 '          Select Operator'
@@ -206,7 +206,7 @@ SELECT * FROM src src1 LEFT OUTER JOIN s
 '            alias: src2'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (key > 10.0)'
+'                  expr: (key > 10)'
 '                  type: boolean'
 '              Reduce Output Operator'
 '                key expressions:'
@@ -227,7 +227,7 @@ SELECT * FROM src src1 LEFT OUTER JOIN s
 '            alias: src3'
 '            Filter Operator'
 '              predicate:'
-'                  expr: ((key < 10.0) and (key > 10.0))'
+'                  expr: ((key < 10) and (key > 10))'
 '                  type: boolean'
 '              Reduce Output Operator'
 '                key expressions:'
@@ -253,7 +253,7 @@ SELECT * FROM src src1 LEFT OUTER JOIN s
 '            1 {VALUE._col0} {VALUE._col1}'
 '            2 {VALUE._col0} {VALUE._col1}'
 '          filter predicates:'
-'            0 {(VALUE._col0 < 10.0)}'
+'            0 {(VALUE._col0 < 10)}'
 '            1 '
 '            2 '
 '          handleSkewJoin: false'
@@ -353,7 +353,7 @@ SELECT * FROM src src1 RIGHT OUTER JOIN 
 '            alias: src1'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (key < 10.0)'
+'                  expr: (key < 10)'
 '                  type: boolean'
 '              Reduce Output Operator'
 '                key expressions:'
@@ -391,7 +391,7 @@ SELECT * FROM src src1 RIGHT OUTER JOIN 
 '            alias: src3'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (key < 10.0)'
+'                  expr: (key < 10)'
 '                  type: boolean'
 '              Reduce Output Operator'
 '                key expressions:'
@@ -418,7 +418,7 @@ SELECT * FROM src src1 RIGHT OUTER JOIN 
 '            2 {VALUE._col0} {VALUE._col1}'
 '          filter predicates:'
 '            0 '
-'            1 {(VALUE._col0 > 10.0)}'
+'            1 {(VALUE._col0 > 10)}'
 '            2 '
 '          handleSkewJoin: false'
 '          outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9'
@@ -517,7 +517,7 @@ SELECT * FROM src src1 RIGHT OUTER JOIN 
 '            alias: src1'
 '            Filter Operator'
 '              predicate:'
-'                  expr: (key < 10.0)'
+'                  expr: (key < 10)'
 '                  type: boolean'
 '              Reduce Output Operator'
 '                key expressions:'
@@ -578,8 +578,8 @@ SELECT * FROM src src1 RIGHT OUTER JOIN 
 '            2 {VALUE._col0} {VALUE._col1}'
 '          filter predicates:'
 '            0 '
-'            1 {(VALUE._col0 > 10.0)}'
-'            2 {(VALUE._col0 < 10.0)}'
+'            1 {(VALUE._col0 > 10)}'
+'            2 {(VALUE._col0 < 10)}'
 '          handleSkewJoin: false'
 '          outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9'
 '          Select Operator'