Posted to commits@hive.apache.org by ha...@apache.org on 2013/09/12 03:21:29 UTC

svn commit: r1522098 [22/30] - in /hive/branches/vectorization: ./ beeline/src/test/org/apache/hive/beeline/src/test/ bin/ bin/ext/ common/src/java/org/apache/hadoop/hive/common/ common/src/java/org/apache/hadoop/hive/conf/ conf/ contrib/src/java/org/a...

Modified: hive/branches/vectorization/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java (original)
+++ hive/branches/vectorization/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java Thu Sep 12 01:21:10 2013
@@ -22,6 +22,8 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
+import org.apache.hadoop.hive.ql.io.sarg.TestSearchArgumentImpl;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
@@ -36,10 +38,12 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.FloatWritable;
@@ -128,6 +132,48 @@ public class TestOrcFile {
     }
   }
 
+  public static class AllTypesRow {
+    Boolean boolean1;
+    Byte byte1;
+    Short short1;
+    Integer int1;
+    Long long1;
+    Float float1;
+    Double double1;
+    BytesWritable bytes1;
+    Text string1;
+    MiddleStruct middle;
+    List<InnerStruct> list = new ArrayList<InnerStruct>();
+    Map<Text, InnerStruct> map = new HashMap<Text, InnerStruct>();
+    Timestamp ts;
+    HiveDecimal decimal1;
+
+    AllTypesRow(Boolean b1, Byte b2, Short s1, Integer i1, Long l1, Float f1,
+           Double d1,
+           BytesWritable b3, String s2, MiddleStruct m1,
+           List<InnerStruct> l2, Map<Text, InnerStruct> m2,
+           Timestamp ts1, HiveDecimal decimal) {
+      this.boolean1 = b1;
+      this.byte1 = b2;
+      this.short1 = s1;
+      this.int1 = i1;
+      this.long1 = l1;
+      this.float1 = f1;
+      this.double1 = d1;
+      this.bytes1 = b3;
+      if (s2 == null) {
+        this.string1 = null;
+      } else {
+        this.string1 = new Text(s2);
+      }
+      this.middle = m1;
+      this.list = l2;
+      this.map = m2;
+      this.ts = ts1;
+      this.decimal1 = decimal;
+    }
+  }
+
   private static InnerStruct inner(int i, String s) {
     return new InnerStruct(i, s);
   }
@@ -185,14 +231,301 @@ public class TestOrcFile {
   }
 
   @Test
+  public void testWriteFormat_0_11() throws Exception {
+    ObjectInspector inspector;
+    synchronized (TestOrcFile.class) {
+      inspector = ObjectInspectorFactory
+          .getReflectionObjectInspector(AllTypesRow.class,
+              ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
+    }
+    conf.set("hive.exec.orc.write.format", "0.11");
+    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
+        100000, CompressionKind.NONE, 10000, 10000);
+    for(int i = 0; i < 7500; i++) {
+      if (i % 2 == 0) {
+        writer.addRow(new AllTypesRow(false, (byte) 1, (short) 1024, 65536,
+            Long.MAX_VALUE, (float) 1.0, -15.0, bytes(0, 1, 2, 3, 4), "hi",
+            new MiddleStruct(inner(1, "bye"), inner(2, "sigh")), list(
+                inner(3, "good"), inner(4, "bad")), map(), Timestamp
+                .valueOf("2000-03-12 15:00:00"), new HiveDecimal(
+                "12345678.6547456")));
+      } else {
+        writer.addRow(new AllTypesRow(true, (byte) 100, (short) 2048, 65536,
+            Long.MAX_VALUE, (float) 2.0, -5.0, bytes(), "bye",
+            new MiddleStruct(inner(1, "bye"), inner(2, "sigh")), list(
+                inner(100000000, "cat"), inner(-100000, "in"),
+                inner(1234, "hat")),
+            map(inner(5, "chani"), inner(1, "mauddib")), Timestamp
+                .valueOf("2000-03-12 15:00:01"), new HiveDecimal(
+                "12345678.6547457")));
+      }
+    }
+    writer.close();
+  }
+
+  @Test
+  public void testReadFormat_0_11() throws Exception {
+    Path resourceDir = new Path(System.getProperty("test.build.resources", "ql"
+        + File.separator + "src" + File.separator + "test" + File.separator
+        + "resources"));
+    Path oldFilePath = new Path(resourceDir, "orc-file-11-format.orc");
+    Reader reader = OrcFile.createReader(fs, oldFilePath);
+
+    int stripeCount = 0;
+    int rowCount = 0;
+    long currentOffset = -1;
+    for(StripeInformation stripe : reader.getStripes()) {
+      stripeCount += 1;
+      rowCount += stripe.getNumberOfRows();
+      if (currentOffset < 0) {
+        currentOffset = stripe.getOffset() + stripe.getIndexLength()
+            + stripe.getDataLength() + stripe.getFooterLength();
+      } else {
+        assertEquals(currentOffset, stripe.getOffset());
+        currentOffset += stripe.getIndexLength() + stripe.getDataLength()
+            + stripe.getFooterLength();
+      }
+    }
+    assertEquals(reader.getNumberOfRows(), rowCount);
+    assertEquals(2, stripeCount);
+
+    // check the stats
+    ColumnStatistics[] stats = reader.getStatistics();
+    assertEquals(7500, stats[1].getNumberOfValues());
+    assertEquals(3750, ((BooleanColumnStatistics) stats[1]).getFalseCount());
+    assertEquals(3750, ((BooleanColumnStatistics) stats[1]).getTrueCount());
+    assertEquals("count: 7500 true: 3750", stats[1].toString());
+
+    assertEquals(2048, ((IntegerColumnStatistics) stats[3]).getMaximum());
+    assertEquals(1024, ((IntegerColumnStatistics) stats[3]).getMinimum());
+    assertEquals(true, ((IntegerColumnStatistics) stats[3]).isSumDefined());
+    assertEquals(11520000, ((IntegerColumnStatistics) stats[3]).getSum());
+    assertEquals("count: 7500 min: 1024 max: 2048 sum: 11520000",
+        stats[3].toString());
+
+    assertEquals(Long.MAX_VALUE,
+        ((IntegerColumnStatistics) stats[5]).getMaximum());
+    assertEquals(Long.MAX_VALUE,
+        ((IntegerColumnStatistics) stats[5]).getMinimum());
+    assertEquals(false, ((IntegerColumnStatistics) stats[5]).isSumDefined());
+    assertEquals(
+        "count: 7500 min: 9223372036854775807 max: 9223372036854775807",
+        stats[5].toString());
+
+    assertEquals(-15.0, ((DoubleColumnStatistics) stats[7]).getMinimum());
+    assertEquals(-5.0, ((DoubleColumnStatistics) stats[7]).getMaximum());
+    assertEquals(-75000.0, ((DoubleColumnStatistics) stats[7]).getSum(),
+        0.00001);
+    assertEquals("count: 7500 min: -15.0 max: -5.0 sum: -75000.0",
+        stats[7].toString());
+
+    assertEquals("count: 7500 min: bye max: hi", stats[9].toString());
+
+    // check the inspectors
+    StructObjectInspector readerInspector = (StructObjectInspector) reader
+        .getObjectInspector();
+    assertEquals(ObjectInspector.Category.STRUCT, readerInspector.getCategory());
+    assertEquals("struct<boolean1:boolean,byte1:tinyint,short1:smallint,"
+        + "int1:int,long1:bigint,float1:float,double1:double,bytes1:"
+        + "binary,string1:string,middle:struct<list:array<struct<int1:int,"
+        + "string1:string>>>,list:array<struct<int1:int,string1:string>>,"
+        + "map:map<string,struct<int1:int,string1:string>>,ts:timestamp,"
+        + "decimal1:decimal>", readerInspector.getTypeName());
+    List<? extends StructField> fields = readerInspector
+        .getAllStructFieldRefs();
+    BooleanObjectInspector bo = (BooleanObjectInspector) readerInspector
+        .getStructFieldRef("boolean1").getFieldObjectInspector();
+    ByteObjectInspector by = (ByteObjectInspector) readerInspector
+        .getStructFieldRef("byte1").getFieldObjectInspector();
+    ShortObjectInspector sh = (ShortObjectInspector) readerInspector
+        .getStructFieldRef("short1").getFieldObjectInspector();
+    IntObjectInspector in = (IntObjectInspector) readerInspector
+        .getStructFieldRef("int1").getFieldObjectInspector();
+    LongObjectInspector lo = (LongObjectInspector) readerInspector
+        .getStructFieldRef("long1").getFieldObjectInspector();
+    FloatObjectInspector fl = (FloatObjectInspector) readerInspector
+        .getStructFieldRef("float1").getFieldObjectInspector();
+    DoubleObjectInspector dbl = (DoubleObjectInspector) readerInspector
+        .getStructFieldRef("double1").getFieldObjectInspector();
+    BinaryObjectInspector bi = (BinaryObjectInspector) readerInspector
+        .getStructFieldRef("bytes1").getFieldObjectInspector();
+    StringObjectInspector st = (StringObjectInspector) readerInspector
+        .getStructFieldRef("string1").getFieldObjectInspector();
+    StructObjectInspector mid = (StructObjectInspector) readerInspector
+        .getStructFieldRef("middle").getFieldObjectInspector();
+    List<? extends StructField> midFields = mid.getAllStructFieldRefs();
+    ListObjectInspector midli = (ListObjectInspector) midFields.get(0)
+        .getFieldObjectInspector();
+    StructObjectInspector inner = (StructObjectInspector) midli
+        .getListElementObjectInspector();
+    List<? extends StructField> inFields = inner.getAllStructFieldRefs();
+    ListObjectInspector li = (ListObjectInspector) readerInspector
+        .getStructFieldRef("list").getFieldObjectInspector();
+    MapObjectInspector ma = (MapObjectInspector) readerInspector
+        .getStructFieldRef("map").getFieldObjectInspector();
+    TimestampObjectInspector tso = (TimestampObjectInspector) readerInspector
+        .getStructFieldRef("ts").getFieldObjectInspector();
+    HiveDecimalObjectInspector dco = (HiveDecimalObjectInspector) readerInspector
+        .getStructFieldRef("decimal1").getFieldObjectInspector();
+    StringObjectInspector mk = (StringObjectInspector) ma
+        .getMapKeyObjectInspector();
+    RecordReader rows = reader.rows(null);
+    Object row = rows.next(null);
+    assertNotNull(row);
+    // check the contents of the first row
+    assertEquals(false,
+        bo.get(readerInspector.getStructFieldData(row, fields.get(0))));
+    assertEquals(1,
+        by.get(readerInspector.getStructFieldData(row, fields.get(1))));
+    assertEquals(1024,
+        sh.get(readerInspector.getStructFieldData(row, fields.get(2))));
+    assertEquals(65536,
+        in.get(readerInspector.getStructFieldData(row, fields.get(3))));
+    assertEquals(Long.MAX_VALUE,
+        lo.get(readerInspector.getStructFieldData(row, fields.get(4))));
+    assertEquals(1.0,
+        fl.get(readerInspector.getStructFieldData(row, fields.get(5))), 0.00001);
+    assertEquals(-15.0,
+        dbl.get(readerInspector.getStructFieldData(row, fields.get(6))),
+        0.00001);
+    assertEquals(bytes(0, 1, 2, 3, 4),
+        bi.getPrimitiveWritableObject(readerInspector.getStructFieldData(row,
+            fields.get(7))));
+    assertEquals("hi", st.getPrimitiveJavaObject(readerInspector
+        .getStructFieldData(row, fields.get(8))));
+    List<?> midRow = midli.getList(mid.getStructFieldData(
+        readerInspector.getStructFieldData(row, fields.get(9)),
+        midFields.get(0)));
+    assertNotNull(midRow);
+    assertEquals(2, midRow.size());
+    assertEquals(1,
+        in.get(inner.getStructFieldData(midRow.get(0), inFields.get(0))));
+    assertEquals("bye", st.getPrimitiveJavaObject(inner.getStructFieldData(
+        midRow.get(0), inFields.get(1))));
+    assertEquals(2,
+        in.get(inner.getStructFieldData(midRow.get(1), inFields.get(0))));
+    assertEquals("sigh", st.getPrimitiveJavaObject(inner.getStructFieldData(
+        midRow.get(1), inFields.get(1))));
+    List<?> list = li.getList(readerInspector.getStructFieldData(row,
+        fields.get(10)));
+    assertEquals(2, list.size());
+    assertEquals(3,
+        in.get(inner.getStructFieldData(list.get(0), inFields.get(0))));
+    assertEquals("good", st.getPrimitiveJavaObject(inner.getStructFieldData(
+        list.get(0), inFields.get(1))));
+    assertEquals(4,
+        in.get(inner.getStructFieldData(list.get(1), inFields.get(0))));
+    assertEquals("bad", st.getPrimitiveJavaObject(inner.getStructFieldData(
+        list.get(1), inFields.get(1))));
+    Map<?, ?> map = ma.getMap(readerInspector.getStructFieldData(row,
+        fields.get(11)));
+    assertEquals(0, map.size());
+    assertEquals(Timestamp.valueOf("2000-03-12 15:00:00"),
+        tso.getPrimitiveJavaObject(readerInspector.getStructFieldData(row,
+            fields.get(12))));
+    assertEquals(new HiveDecimal("12345678.6547456"),
+        dco.getPrimitiveJavaObject(readerInspector.getStructFieldData(row,
+            fields.get(13))));
+
+    // check the contents of the last row (same pattern as the second row)
+    assertEquals(true, rows.hasNext());
+    rows.seekToRow(7499);
+    row = rows.next(null);
+    assertEquals(true,
+        bo.get(readerInspector.getStructFieldData(row, fields.get(0))));
+    assertEquals(100,
+        by.get(readerInspector.getStructFieldData(row, fields.get(1))));
+    assertEquals(2048,
+        sh.get(readerInspector.getStructFieldData(row, fields.get(2))));
+    assertEquals(65536,
+        in.get(readerInspector.getStructFieldData(row, fields.get(3))));
+    assertEquals(Long.MAX_VALUE,
+        lo.get(readerInspector.getStructFieldData(row, fields.get(4))));
+    assertEquals(2.0,
+        fl.get(readerInspector.getStructFieldData(row, fields.get(5))), 0.00001);
+    assertEquals(-5.0,
+        dbl.get(readerInspector.getStructFieldData(row, fields.get(6))),
+        0.00001);
+    assertEquals(bytes(), bi.getPrimitiveWritableObject(readerInspector
+        .getStructFieldData(row, fields.get(7))));
+    assertEquals("bye", st.getPrimitiveJavaObject(readerInspector
+        .getStructFieldData(row, fields.get(8))));
+    midRow = midli.getList(mid.getStructFieldData(
+        readerInspector.getStructFieldData(row, fields.get(9)),
+        midFields.get(0)));
+    assertNotNull(midRow);
+    assertEquals(2, midRow.size());
+    assertEquals(1,
+        in.get(inner.getStructFieldData(midRow.get(0), inFields.get(0))));
+    assertEquals("bye", st.getPrimitiveJavaObject(inner.getStructFieldData(
+        midRow.get(0), inFields.get(1))));
+    assertEquals(2,
+        in.get(inner.getStructFieldData(midRow.get(1), inFields.get(0))));
+    assertEquals("sigh", st.getPrimitiveJavaObject(inner.getStructFieldData(
+        midRow.get(1), inFields.get(1))));
+    list = li.getList(readerInspector.getStructFieldData(row, fields.get(10)));
+    assertEquals(3, list.size());
+    assertEquals(100000000,
+        in.get(inner.getStructFieldData(list.get(0), inFields.get(0))));
+    assertEquals("cat", st.getPrimitiveJavaObject(inner.getStructFieldData(
+        list.get(0), inFields.get(1))));
+    assertEquals(-100000,
+        in.get(inner.getStructFieldData(list.get(1), inFields.get(0))));
+    assertEquals("in", st.getPrimitiveJavaObject(inner.getStructFieldData(
+        list.get(1), inFields.get(1))));
+    assertEquals(1234,
+        in.get(inner.getStructFieldData(list.get(2), inFields.get(0))));
+    assertEquals("hat", st.getPrimitiveJavaObject(inner.getStructFieldData(
+        list.get(2), inFields.get(1))));
+    map = ma.getMap(readerInspector.getStructFieldData(row, fields.get(11)));
+    assertEquals(2, map.size());
+    boolean[] found = new boolean[2];
+    for(Object key : map.keySet()) {
+      String str = mk.getPrimitiveJavaObject(key);
+      if (str.equals("chani")) {
+        assertEquals(false, found[0]);
+        assertEquals(5,
+            in.get(inner.getStructFieldData(map.get(key), inFields.get(0))));
+        assertEquals(str, st.getPrimitiveJavaObject(inner.getStructFieldData(
+            map.get(key), inFields.get(1))));
+        found[0] = true;
+      } else if (str.equals("mauddib")) {
+        assertEquals(false, found[1]);
+        assertEquals(1,
+            in.get(inner.getStructFieldData(map.get(key), inFields.get(0))));
+        assertEquals(str, st.getPrimitiveJavaObject(inner.getStructFieldData(
+            map.get(key), inFields.get(1))));
+        found[1] = true;
+      } else {
+        throw new IllegalArgumentException("Unknown key " + str);
+      }
+    }
+    assertEquals(true, found[0]);
+    assertEquals(true, found[1]);
+    assertEquals(Timestamp.valueOf("2000-03-12 15:00:01"),
+        tso.getPrimitiveJavaObject(readerInspector.getStructFieldData(row,
+            fields.get(12))));
+    assertEquals(new HiveDecimal("12345678.6547457"),
+        dco.getPrimitiveJavaObject(readerInspector.getStructFieldData(row,
+            fields.get(13))));
+
+    // handle the cleanup
+    assertEquals(false, rows.hasNext());
+    rows.close();
+  }
+
+  @Test
   public void test1() throws Exception {
     ObjectInspector inspector;
     synchronized (TestOrcFile.class) {
       inspector = ObjectInspectorFactory.getReflectionObjectInspector
           (BigRow.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
     }
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        100000, CompressionKind.ZLIB, 10000, 10000);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(100000)
+                                         .bufferSize(10000));
     writer.addRow(new BigRow(false, (byte) 1, (short) 1024, 65536,
         Long.MAX_VALUE, (float) 1.0, -15.0, bytes(0,1,2,3,4), "hi",
         new MiddleStruct(inner(1, "bye"), inner(2, "sigh")),
@@ -423,8 +756,13 @@ public class TestOrcFile {
           (InnerStruct.class,
               ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
     }
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        1000, CompressionKind.NONE, 100, 1000);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(1000)
+                                         .compress(CompressionKind.NONE)
+                                         .bufferSize(100)
+                                         .rowIndexStride(1000));
     Random r1 = new Random(1);
     Random r2 = new Random(2);
     int x;
@@ -506,8 +844,12 @@ public class TestOrcFile {
       inspector = ObjectInspectorFactory.getReflectionObjectInspector
           (BigRow.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
     }
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        1000, CompressionKind.NONE, 100, 10000);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(1000)
+                                         .compress(CompressionKind.NONE)
+                                         .bufferSize(100));
     writer.close();
     Reader reader = OrcFile.createReader(fs, testFilePath);
     assertEquals(false, reader.rows(null).hasNext());
@@ -526,9 +868,14 @@ public class TestOrcFile {
       inspector = ObjectInspectorFactory.getReflectionObjectInspector
           (BigRow.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
     }
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        1000, CompressionKind.NONE, 100, 10000);
-    writer.addUserMetadata("my.meta", byteBuf(1, 2, 3, 4, 5, 6, 7, -1, -2, 127, -128));
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(1000)
+                                         .compress(CompressionKind.NONE)
+                                         .bufferSize(100));
+    writer.addUserMetadata("my.meta", byteBuf(1, 2, 3, 4, 5, 6, 7, -1, -2, 127,
+                                              -128));
     writer.addUserMetadata("clobber", byteBuf(1,2,3));
     writer.addUserMetadata("clobber", byteBuf(4,3,2,1));
     ByteBuffer bigBuf = ByteBuffer.allocate(40000);
@@ -592,8 +939,13 @@ public class TestOrcFile {
       inspector = OrcStruct.createObjectInspector(0, types);
     }
     HiveDecimal maxValue = new HiveDecimal("100000000000000000000");
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        1000, CompressionKind.NONE, 100, 10000);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(1000)
+                                         .compress(CompressionKind.NONE)
+                                         .bufferSize(100)
+                                         .blockPadding(false));
     OrcStruct row = new OrcStruct(3);
     OrcUnion union = new OrcUnion();
     row.setFieldValue(1, union);
@@ -683,6 +1035,7 @@ public class TestOrcFile {
     assertEquals(0.0, rows.getProgress(), 0.000001);
     assertEquals(true, rows.hasNext());
     row = (OrcStruct) rows.next(null);
+    assertEquals(1, rows.getRowNumber());
     inspector = reader.getObjectInspector();
     assertEquals("struct<time:timestamp,union:uniontype<int,string>,decimal:decimal>",
         inspector.getTypeName());
@@ -693,6 +1046,7 @@ public class TestOrcFile {
     assertEquals(new IntWritable(42), union.getObject());
     assertEquals(new HiveDecimal("12345678.6547456"), row.getFieldValue(2));
     row = (OrcStruct) rows.next(row);
+    assertEquals(2, rows.getRowNumber());
     assertEquals(Timestamp.valueOf("2000-03-20 12:00:00.123456789"),
         row.getFieldValue(0));
     assertEquals(1, union.getTag());
@@ -769,8 +1123,12 @@ public class TestOrcFile {
           (InnerStruct.class,
               ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
     }
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        1000, CompressionKind.SNAPPY, 100, 10000);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(1000)
+                                         .compress(CompressionKind.SNAPPY)
+                                         .bufferSize(100));
     Random rand = new Random(12);
     for(int i=0; i < 10000; ++i) {
       writer.addRow(new InnerStruct(rand.nextInt(),
@@ -804,8 +1162,13 @@ public class TestOrcFile {
           (InnerStruct.class,
               ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
     }
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        5000, CompressionKind.SNAPPY, 1000, 0);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(5000)
+                                         .compress(CompressionKind.SNAPPY)
+                                         .bufferSize(1000)
+                                         .rowIndexStride(0));
     Random rand = new Random(24);
     for(int i=0; i < 10000; ++i) {
       InnerStruct row = new InnerStruct(rand.nextInt(),
@@ -845,8 +1208,12 @@ public class TestOrcFile {
       inspector = ObjectInspectorFactory.getReflectionObjectInspector
           (BigRow.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
     }
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        200000, CompressionKind.ZLIB, 65536, 1000);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(200000)
+                                         .bufferSize(65536)
+                                         .rowIndexStride(1000));
     Random rand = new Random(42);
     final int COUNT=32768;
     long[] intValues= new long[COUNT];
@@ -1028,8 +1395,14 @@ public class TestOrcFile {
               ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
     }
     MyMemoryManager memory = new MyMemoryManager(conf, 10000, 0.1);
-    Writer writer = new WriterImpl(fs, testFilePath, conf, inspector,
-        50000, CompressionKind.NONE, 100, 0, memory);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .compress(CompressionKind.NONE)
+                                         .stripeSize(50000)
+                                         .bufferSize(100)
+                                         .rowIndexStride(0)
+                                         .memory(memory));
     assertEquals(testFilePath, memory.path);
     for(int i=0; i < 2500; ++i) {
       writer.addRow(new InnerStruct(i*300, Integer.toHexString(10*i)));
@@ -1046,4 +1419,86 @@ public class TestOrcFile {
     assertEquals(25, i);
     assertEquals(2500, reader.getNumberOfRows());
   }
+
+  @Test
+  public void testPredicatePushdown() throws Exception {
+    ObjectInspector inspector;
+    synchronized (TestOrcFile.class) {
+      inspector = ObjectInspectorFactory.getReflectionObjectInspector
+          (InnerStruct.class,
+              ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
+    }
+    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
+        400000L, CompressionKind.NONE, 500, 1000);
+    for(int i=0; i < 3500; ++i) {
+      writer.addRow(new InnerStruct(i*300, Integer.toHexString(10*i)));
+    }
+    writer.close();
+    Reader reader = OrcFile.createReader(fs, testFilePath);
+    assertEquals(3500, reader.getNumberOfRows());
+
+    SearchArgument sarg = SearchArgument.FACTORY.newBuilder()
+        .startAnd()
+          .startNot()
+             .lessThan("int1", 300000)
+          .end()
+          .lessThan("int1", 600000)
+        .end()
+        .build();
+    RecordReader rows = reader.rows(0L, Long.MAX_VALUE,
+        new boolean[]{true, true, true}, sarg,
+        new String[]{null, "int1", "string1"});
+    assertEquals(1000L, rows.getRowNumber());
+    OrcStruct row = null;
+    for(int i=1000; i < 2000; ++i) {
+      assertTrue(rows.hasNext());
+      row = (OrcStruct) rows.next(row);
+      assertEquals(300 * i, ((IntWritable) row.getFieldValue(0)).get());
+      assertEquals(Integer.toHexString(10*i), row.getFieldValue(1).toString());
+    }
+    assertTrue(!rows.hasNext());
+    assertEquals(3500, rows.getRowNumber());
+
+    // look through the file with no rows selected
+    sarg = SearchArgument.FACTORY.newBuilder()
+        .startAnd()
+          .lessThan("int1", 0)
+        .end()
+        .build();
+    rows = reader.rows(0L, Long.MAX_VALUE,
+        new boolean[]{true, true, true}, sarg,
+        new String[]{null, "int1", "string1"});
+    assertEquals(3500L, rows.getRowNumber());
+    assertTrue(!rows.hasNext());
+
+    // select first 100 and last 100 rows
+    sarg = SearchArgument.FACTORY.newBuilder()
+        .startOr()
+          .lessThan("int1", 300 * 100)
+          .startNot()
+            .lessThan("int1", 300 * 3400)
+          .end()
+        .end()
+        .build();
+    rows = reader.rows(0L, Long.MAX_VALUE,
+        new boolean[]{true, true, true}, sarg,
+        new String[]{null, "int1", "string1"});
+    row = null;
+    for(int i=0; i < 1000; ++i) {
+      assertTrue(rows.hasNext());
+      assertEquals(i, rows.getRowNumber());
+      row = (OrcStruct) rows.next(row);
+      assertEquals(300 * i, ((IntWritable) row.getFieldValue(0)).get());
+      assertEquals(Integer.toHexString(10*i), row.getFieldValue(1).toString());
+    }
+    for(int i=3000; i < 3500; ++i) {
+      assertTrue(rows.hasNext());
+      assertEquals(i, rows.getRowNumber());
+      row = (OrcStruct) rows.next(row);
+      assertEquals(300 * i, ((IntWritable) row.getFieldValue(0)).get());
+      assertEquals(Integer.toHexString(10*i), row.getFieldValue(1).toString());
+    }
+    assertTrue(!rows.hasNext());
+    assertEquals(3500, rows.getRowNumber());
+  }
 }
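
The TestOrcFile changes above track two API moves in this commit: writer construction migrates from the positional OrcFile.createWriter(fs, path, conf, inspector, stripeSize, compress, bufferSize, rowIndexStride) overload to the fluent OrcFile.writerOptions(conf) builder, and the read path gains predicate pushdown through SearchArgument. A minimal sketch assembled only from the call sites in this diff; the fixture fields conf, fs, testFilePath, and inspector are assumed to be set up as in the test class:

    Writer writer = OrcFile.createWriter(testFilePath,
        OrcFile.writerOptions(conf)
            .inspector(inspector)            // ObjectInspector for the row type
            .stripeSize(100000)              // target stripe size in bytes
            .compress(CompressionKind.NONE)
            .bufferSize(10000)
            .rowIndexStride(1000));          // rows per index entry; 0 disables the index
    for (int i = 0; i < 3500; ++i) {
      writer.addRow(new InnerStruct(i * 300, Integer.toHexString(10 * i)));
    }
    writer.close();

    // With a row index present, a SearchArgument lets the reader skip row
    // groups whose index statistics cannot match the predicate.
    SearchArgument sarg = SearchArgument.FACTORY.newBuilder()
        .startAnd()
          .startNot().lessThan("int1", 300000).end()   // int1 >= 300000
          .lessThan("int1", 600000)                    // int1 <  600000
        .end()
        .build();
    Reader reader = OrcFile.createReader(fs, testFilePath);
    RecordReader rows = reader.rows(0L, Long.MAX_VALUE,
        new boolean[]{true, true, true},               // column inclusion flags
        sarg,
        new String[]{null, "int1", "string1"});        // column names, indexed by column id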

Modified: hive/branches/vectorization/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcNullOptimization.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcNullOptimization.java?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcNullOptimization.java (original)
+++ hive/branches/vectorization/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcNullOptimization.java Thu Sep 12 01:21:10 2013
@@ -91,15 +91,21 @@ public class TestOrcNullOptimization {
       inspector = ObjectInspectorFactory.getReflectionObjectInspector
           (MyStruct.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
     }
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        100000, CompressionKind.NONE, 10000, 10000);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(100000)
+                                         .compress(CompressionKind.NONE)
+                                         .bufferSize(10000));
     Random rand = new Random(100);
-    writer.addRow(new MyStruct(null, null, true, Lists.newArrayList(new InnerStruct(100))));
+    writer.addRow(new MyStruct(null, null, true,
+                               Lists.newArrayList(new InnerStruct(100))));
     for (int i = 2; i < 20000; i++) {
       writer.addRow(new MyStruct(rand.nextInt(1), "a", true, Lists
           .newArrayList(new InnerStruct(100))));
     }
-    writer.addRow(new MyStruct(null, null, true, Lists.newArrayList(new InnerStruct(100))));
+    writer.addRow(new MyStruct(null, null, true,
+                               Lists.newArrayList(new InnerStruct(100))));
     writer.close();
 
     Reader reader = OrcFile.createReader(fs, testFilePath);
@@ -117,7 +123,8 @@ public class TestOrcNullOptimization {
 
     assertEquals("a", ((StringColumnStatistics) stats[2]).getMaximum());
     assertEquals("a", ((StringColumnStatistics) stats[2]).getMinimum());
-    assertEquals(19998, ((StringColumnStatistics) stats[2]).getNumberOfValues());
+    assertEquals(19998,
+                 ((StringColumnStatistics) stats[2]).getNumberOfValues());
     assertEquals("count: 19998 min: a max: a",
         stats[2].toString());
 
@@ -142,8 +149,10 @@ public class TestOrcNullOptimization {
     List<Boolean> got = Lists.newArrayList();
     // check if the stripe footer contains a PRESENT stream
     for (StripeInformation sinfo : reader.getStripes()) {
-      OrcProto.StripeFooter sf = ((RecordReaderImpl) rows).readStripeFooter(sinfo);
-      got.add(sf.toString().indexOf(OrcProto.Stream.Kind.PRESENT.toString()) != -1);
+      OrcProto.StripeFooter sf =
+        ((RecordReaderImpl) rows).readStripeFooter(sinfo);
+      got.add(sf.toString().indexOf(OrcProto.Stream.Kind.PRESENT.toString())
+              != -1);
     }
     assertEquals(expected, got);
 
@@ -154,7 +163,8 @@ public class TestOrcNullOptimization {
     assertNull(row.getFieldValue(1));
     assertEquals(new BooleanWritable(true), row.getFieldValue(2));
     assertEquals(new IntWritable(100),
-        ((OrcStruct) ((ArrayList<?>) row.getFieldValue(3)).get(0)).getFieldValue(0));
+        ((OrcStruct) ((ArrayList<?>) row.getFieldValue(3)).get(0)).
+                 getFieldValue(0));
 
     rows.seekToRow(19998);
     // last-1 row
@@ -164,7 +174,8 @@ public class TestOrcNullOptimization {
     assertEquals(new IntWritable(0), row.getFieldValue(0));
     assertEquals(new BooleanWritable(true), row.getFieldValue(2));
     assertEquals(new IntWritable(100),
-        ((OrcStruct) ((ArrayList<?>) row.getFieldValue(3)).get(0)).getFieldValue(0));
+        ((OrcStruct) ((ArrayList<?>) row.getFieldValue(3)).get(0)).
+                 getFieldValue(0));
 
     // last row
     row = (OrcStruct) rows.next(row);
@@ -173,7 +184,8 @@ public class TestOrcNullOptimization {
     assertNull(row.getFieldValue(1));
     assertEquals(new BooleanWritable(true), row.getFieldValue(2));
     assertEquals(new IntWritable(100),
-        ((OrcStruct) ((ArrayList<?>) row.getFieldValue(3)).get(0)).getFieldValue(0));
+        ((OrcStruct) ((ArrayList<?>) row.getFieldValue(3)).get(0)).
+                 getFieldValue(0));
 
     rows.close();
   }
@@ -185,14 +197,19 @@ public class TestOrcNullOptimization {
       inspector = ObjectInspectorFactory.getReflectionObjectInspector
           (MyStruct.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
     }
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        100000, CompressionKind.NONE, 10000, 10000);
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(100000)
+                                         .compress(CompressionKind.NONE)
+                                         .bufferSize(10000));
     Random rand = new Random(100);
     for (int i = 1; i < 20000; i++) {
       writer.addRow(new MyStruct(rand.nextInt(1), "a", true, Lists
           .newArrayList(new InnerStruct(100))));
     }
-    writer.addRow(new MyStruct(0, "b", true, Lists.newArrayList(new InnerStruct(100))));
+    writer.addRow(new MyStruct(0, "b", true,
+                               Lists.newArrayList(new InnerStruct(100))));
     writer.close();
 
     Reader reader = OrcFile.createReader(fs, testFilePath);
@@ -210,7 +227,8 @@ public class TestOrcNullOptimization {
 
     assertEquals("b", ((StringColumnStatistics) stats[2]).getMaximum());
     assertEquals("a", ((StringColumnStatistics) stats[2]).getMinimum());
-    assertEquals(20000, ((StringColumnStatistics) stats[2]).getNumberOfValues());
+    assertEquals(20000,
+                 ((StringColumnStatistics) stats[2]).getNumberOfValues());
     assertEquals("count: 20000 min: a max: b",
         stats[2].toString());
 
@@ -233,8 +251,10 @@ public class TestOrcNullOptimization {
     List<Boolean> got = Lists.newArrayList();
     // check if the stripe footer contains a PRESENT stream
     for (StripeInformation sinfo : reader.getStripes()) {
-      OrcProto.StripeFooter sf = ((RecordReaderImpl) rows).readStripeFooter(sinfo);
-      got.add(sf.toString().indexOf(OrcProto.Stream.Kind.PRESENT.toString()) != -1);
+      OrcProto.StripeFooter sf =
+        ((RecordReaderImpl) rows).readStripeFooter(sinfo);
+      got.add(sf.toString().indexOf(OrcProto.Stream.Kind.PRESENT.toString())
+              != -1);
     }
     assertEquals(expected, got);
 
@@ -247,7 +267,8 @@ public class TestOrcNullOptimization {
     assertEquals("a", row.getFieldValue(1).toString());
     assertEquals(new BooleanWritable(true), row.getFieldValue(2));
     assertEquals(new IntWritable(100),
-        ((OrcStruct) ((ArrayList<?>) row.getFieldValue(3)).get(0)).getFieldValue(0));
+                 ((OrcStruct) ((ArrayList<?>) row.getFieldValue(3)).get(0)).
+                   getFieldValue(0));
 
     // last row
     row = (OrcStruct) rows.next(row);
@@ -257,8 +278,8 @@ public class TestOrcNullOptimization {
     assertEquals("b", row.getFieldValue(1).toString());
     assertEquals(new BooleanWritable(true), row.getFieldValue(2));
     assertEquals(new IntWritable(100),
-        ((OrcStruct) ((ArrayList<?>) row.getFieldValue(3)).get(0)).getFieldValue(0));
-
+                 ((OrcStruct) ((ArrayList<?>) row.getFieldValue(3)).get(0)).
+                   getFieldValue(0));
     rows.close();
   }
 
@@ -269,16 +290,27 @@ public class TestOrcNullOptimization {
       inspector = ObjectInspectorFactory.getReflectionObjectInspector
           (MyStruct.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
     }
-    Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        100000, CompressionKind.ZLIB, 10000, 10000);
-    writer.addRow(new MyStruct(3, "a", true, Lists.newArrayList(new InnerStruct(100))));
-    writer.addRow(new MyStruct(null, "b", true, Lists.newArrayList(new InnerStruct(100))));
-    writer.addRow(new MyStruct(3, null, false, Lists.newArrayList(new InnerStruct(100))));
-    writer.addRow(new MyStruct(3, "d", true, Lists.newArrayList(new InnerStruct(100))));
-    writer.addRow(new MyStruct(2, "e", true, Lists.newArrayList(new InnerStruct(100))));
-    writer.addRow(new MyStruct(2, "f", true, Lists.newArrayList(new InnerStruct(100))));
-    writer.addRow(new MyStruct(2, "g", true, Lists.newArrayList(new InnerStruct(100))));
-    writer.addRow(new MyStruct(2, "h", true, Lists.newArrayList(new InnerStruct(100))));
+    Writer writer = OrcFile.createWriter(testFilePath,
+                                         OrcFile.writerOptions(conf)
+                                         .inspector(inspector)
+                                         .stripeSize(100000)
+                                         .bufferSize(10000));
+    writer.addRow(new MyStruct(3, "a", true,
+                               Lists.newArrayList(new InnerStruct(100))));
+    writer.addRow(new MyStruct(null, "b", true,
+                               Lists.newArrayList(new InnerStruct(100))));
+    writer.addRow(new MyStruct(3, null, false,
+                               Lists.newArrayList(new InnerStruct(100))));
+    writer.addRow(new MyStruct(3, "d", true,
+                               Lists.newArrayList(new InnerStruct(100))));
+    writer.addRow(new MyStruct(2, "e", true,
+                               Lists.newArrayList(new InnerStruct(100))));
+    writer.addRow(new MyStruct(2, "f", true,
+                               Lists.newArrayList(new InnerStruct(100))));
+    writer.addRow(new MyStruct(2, "g", true,
+                               Lists.newArrayList(new InnerStruct(100))));
+    writer.addRow(new MyStruct(2, "h", true,
+                               Lists.newArrayList(new InnerStruct(100))));
     writer.close();
 
     Reader reader = OrcFile.createReader(fs, testFilePath);
@@ -319,8 +351,10 @@ public class TestOrcNullOptimization {
     List<Boolean> got = Lists.newArrayList();
     // check if the stripe footer contains a PRESENT stream
     for (StripeInformation sinfo : reader.getStripes()) {
-      OrcProto.StripeFooter sf = ((RecordReaderImpl) rows).readStripeFooter(sinfo);
-      got.add(sf.toString().indexOf(OrcProto.Stream.Kind.PRESENT.toString()) != -1);
+      OrcProto.StripeFooter sf =
+        ((RecordReaderImpl) rows).readStripeFooter(sinfo);
+      got.add(sf.toString().indexOf(OrcProto.Stream.Kind.PRESENT.toString())
+              != -1);
     }
     assertEquals(expected, got);
 
@@ -331,7 +365,8 @@ public class TestOrcNullOptimization {
     assertEquals("a", row.getFieldValue(1).toString());
     assertEquals(new BooleanWritable(true), row.getFieldValue(2));
     assertEquals(new IntWritable(100),
-        ((OrcStruct) ((ArrayList<?>) row.getFieldValue(3)).get(0)).getFieldValue(0));
+        ((OrcStruct) ((ArrayList<?>) row.getFieldValue(3)).get(0)).
+                 getFieldValue(0));
 
     // row 2
     row = (OrcStruct) rows.next(row);
@@ -340,7 +375,8 @@ public class TestOrcNullOptimization {
     assertEquals("b", row.getFieldValue(1).toString());
     assertEquals(new BooleanWritable(true), row.getFieldValue(2));
     assertEquals(new IntWritable(100),
-        ((OrcStruct) ((ArrayList<?>) row.getFieldValue(3)).get(0)).getFieldValue(0));
+        ((OrcStruct) ((ArrayList<?>) row.getFieldValue(3)).get(0)).
+                 getFieldValue(0));
 
     // row 3
     row = (OrcStruct) rows.next(row);
@@ -349,7 +385,8 @@ public class TestOrcNullOptimization {
     assertEquals(new IntWritable(3), row.getFieldValue(0));
     assertEquals(new BooleanWritable(false), row.getFieldValue(2));
     assertEquals(new IntWritable(100),
-        ((OrcStruct) ((ArrayList<?>) row.getFieldValue(3)).get(0)).getFieldValue(0));
+                 ((OrcStruct) ((ArrayList<?>) row.getFieldValue(3)).get(0)).
+                 getFieldValue(0));
     rows.close();
   }
 }
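
In the last hunk above, the old positional call passed CompressionKind.ZLIB while the replacement builder never calls .compress(); the same pattern appears in test1 in TestOrcFile. That suggests, though this diff alone does not prove it, that the builder defaults to ZLIB. A hedged sketch of the minimal builder form:

    // No .compress(...) call: based on the hunks in this commit, the builder
    // appears to default to ZLIB. Confirm against OrcFile itself before
    // relying on the default.
    Writer writer = OrcFile.createWriter(testFilePath,
        OrcFile.writerOptions(conf)
            .inspector(inspector)
            .stripeSize(100000)
            .bufferSize(10000));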

Modified: hive/branches/vectorization/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestRunLengthByteReader.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestRunLengthByteReader.java?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestRunLengthByteReader.java (original)
+++ hive/branches/vectorization/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestRunLengthByteReader.java Thu Sep 12 01:21:10 2013
@@ -46,7 +46,7 @@ public class TestRunLengthByteReader {
     collect.buffer.setByteBuffer(inBuf, 0, collect.buffer.size());
     inBuf.flip();
     RunLengthByteReader in = new RunLengthByteReader(InStream.create("test",
-        inBuf, null, 100));
+        new ByteBuffer[]{inBuf}, new long[]{0}, inBuf.remaining(), null, 100));
     for(int i=0; i < 2048; ++i) {
       int x = in.next() & 0xff;
       if (i < 1024) {
@@ -88,7 +88,7 @@ public class TestRunLengthByteReader {
     collect.buffer.setByteBuffer(inBuf, 0, collect.buffer.size());
     inBuf.flip();
     RunLengthByteReader in = new RunLengthByteReader(InStream.create("test",
-        inBuf, codec, 500));
+        new ByteBuffer[]{inBuf}, new long[]{0}, inBuf.remaining(), codec, 500));
     for(int i=0; i < 2048; ++i) {
       int x = in.next() & 0xff;
       if (i < 1024) {
@@ -125,7 +125,7 @@ public class TestRunLengthByteReader {
     collect.buffer.setByteBuffer(inBuf, 0, collect.buffer.size());
     inBuf.flip();
     RunLengthByteReader in = new RunLengthByteReader(InStream.create("test",
-        inBuf, null, 100));
+        new ByteBuffer[]{inBuf}, new long[]{0}, inBuf.remaining(), null, 100));
     for(int i=0; i < 2048; i += 10) {
       int x = in.next() & 0xff;
       if (i < 1024) {

Modified: hive/branches/vectorization/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestRunLengthIntegerReader.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestRunLengthIntegerReader.java?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestRunLengthIntegerReader.java (original)
+++ hive/branches/vectorization/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestRunLengthIntegerReader.java Thu Sep 12 01:21:10 2013
@@ -54,7 +54,8 @@ public class TestRunLengthIntegerReader 
     collect.buffer.setByteBuffer(inBuf, 0, collect.buffer.size());
     inBuf.flip();
     RunLengthIntegerReader in = new RunLengthIntegerReader(InStream.create
-        ("test", inBuf, codec, 1000), true);
+        ("test", new ByteBuffer[]{inBuf}, new long[]{0}, inBuf.remaining(),
+            codec, 1000), true);
     for(int i=0; i < 2048; ++i) {
       int x = (int) in.next();
       if (i < 1024) {
@@ -105,7 +106,8 @@ public class TestRunLengthIntegerReader 
     collect.buffer.setByteBuffer(inBuf, 0, collect.buffer.size());
     inBuf.flip();
     RunLengthIntegerReader in = new RunLengthIntegerReader(InStream.create
-        ("test", inBuf, null, 100), true);
+        ("test", new ByteBuffer[]{inBuf}, new long[]{0}, inBuf.remaining(),
+            null, 100), true);
     for(int i=0; i < 2048; i += 10) {
       int x = (int) in.next();
       if (i < 1024) {

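The RunLength reader tests above pick up a signature change in InStream.create: the single ByteBuffer argument becomes an array of buffers, an array of their offsets within the stream, and the total stream length. A before/after sketch taken directly from these call sites, with inBuf and codec prepared as in the tests and the return type assumed to be InStream:

    // before: InStream.create("test", inBuf, codec, 1000)
    InStream in = InStream.create("test",
        new ByteBuffer[]{inBuf},   // the backing buffers
        new long[]{0},             // offset of each buffer within the stream
        inBuf.remaining(),         // total length of the stream
        codec,                     // compression codec, or null for none
        1000);                     // codec buffer size
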
Modified: hive/branches/vectorization/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestSearchArgumentImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestSearchArgumentImpl.java?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestSearchArgumentImpl.java (original)
+++ hive/branches/vectorization/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestSearchArgumentImpl.java Thu Sep 12 01:21:10 2013
@@ -24,8 +24,6 @@ import org.apache.hadoop.hive.ql.io.sarg
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgumentImpl.ExpressionBuilder;
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgumentImpl.ExpressionTree;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.Text;
 import org.apache.mina.util.IdentityHashSet;
 import org.junit.Test;
 
@@ -40,6 +38,9 @@ import static junit.framework.Assert.ass
  * to true and using a custom record reader that prints out the value of
  * hive.io.filter.expr.serialized in createRecordReader. This should be
  * replaced by generating the AST using the API and passing that in.
+ *
+ * In each case, the corresponding part of the where clause is in the
+ * comment above the blob.
  */
 public class TestSearchArgumentImpl {
 
@@ -63,6 +64,19 @@ public class TestSearchArgumentImpl {
     return new ExpressionTree(val);
   }
 
+  /**
+   * Create a predicate leaf. This is used by another test.
+   */
+  public static
+  PredicateLeaf createPredicateLeaf(PredicateLeaf.Operator operator,
+                                    PredicateLeaf.Type type,
+                                    String columnName,
+                                    Object literal,
+                                    List<Object> literalList) {
+    return new SearchArgumentImpl.PredicateLeafImpl(operator, type, columnName,
+        literal, literalList);
+  }
+
   @Test
   public void testNotPushdown() throws Exception {
     assertEquals("leaf-1", ExpressionBuilder.pushDownNot(leaf(1)).toString());
@@ -689,55 +703,55 @@ public class TestSearchArgumentImpl {
     assertEquals(PredicateLeaf.Type.STRING, leaf.getType());
     assertEquals(PredicateLeaf.Operator.EQUALS, leaf.getOperator());
     assertEquals("first_name", leaf.getColumnName());
-    assertEquals(new Text("john"), leaf.getLiteral());
+    assertEquals("john", leaf.getLiteral());
 
     leaf = leaves.get(1);
     assertEquals(PredicateLeaf.Type.STRING, leaf.getType());
     assertEquals(PredicateLeaf.Operator.LESS_THAN_EQUALS, leaf.getOperator());
     assertEquals("first_name", leaf.getColumnName());
-    assertEquals(new Text("greg"), leaf.getLiteral());
+    assertEquals("greg", leaf.getLiteral());
 
     leaf = leaves.get(2);
     assertEquals(PredicateLeaf.Type.STRING, leaf.getType());
     assertEquals(PredicateLeaf.Operator.LESS_THAN, leaf.getOperator());
     assertEquals("first_name", leaf.getColumnName());
-    assertEquals(new Text("alan"), leaf.getLiteral());
+    assertEquals("alan", leaf.getLiteral());
 
     leaf = leaves.get(3);
     assertEquals(PredicateLeaf.Type.INTEGER, leaf.getType());
     assertEquals(PredicateLeaf.Operator.LESS_THAN_EQUALS, leaf.getOperator());
     assertEquals("id", leaf.getColumnName());
-    assertEquals(new LongWritable(12), leaf.getLiteral());
+    assertEquals(12L, leaf.getLiteral());
 
     leaf = leaves.get(4);
     assertEquals(PredicateLeaf.Type.INTEGER, leaf.getType());
     assertEquals(PredicateLeaf.Operator.LESS_THAN_EQUALS, leaf.getOperator());
     assertEquals("id", leaf.getColumnName());
-    assertEquals(new LongWritable(13), leaf.getLiteral());
+    assertEquals(13L, leaf.getLiteral());
 
     leaf = leaves.get(5);
     assertEquals(PredicateLeaf.Type.INTEGER, leaf.getType());
     assertEquals(PredicateLeaf.Operator.LESS_THAN, leaf.getOperator());
     assertEquals("id", leaf.getColumnName());
-    assertEquals(new LongWritable(15), leaf.getLiteral());
+    assertEquals(15L, leaf.getLiteral());
 
     leaf = leaves.get(6);
     assertEquals(PredicateLeaf.Type.INTEGER, leaf.getType());
     assertEquals(PredicateLeaf.Operator.LESS_THAN, leaf.getOperator());
     assertEquals("id", leaf.getColumnName());
-    assertEquals(new LongWritable(16), leaf.getLiteral());
+    assertEquals(16L, leaf.getLiteral());
 
     leaf = leaves.get(7);
     assertEquals(PredicateLeaf.Type.INTEGER, leaf.getType());
     assertEquals(PredicateLeaf.Operator.NULL_SAFE_EQUALS, leaf.getOperator());
     assertEquals("id", leaf.getColumnName());
-    assertEquals(new LongWritable(30), leaf.getLiteral());
+    assertEquals(30L, leaf.getLiteral());
 
     leaf = leaves.get(8);
     assertEquals(PredicateLeaf.Type.STRING, leaf.getType());
     assertEquals(PredicateLeaf.Operator.NULL_SAFE_EQUALS, leaf.getOperator());
     assertEquals("first_name", leaf.getColumnName());
-    assertEquals(new Text("owen"), leaf.getLiteral());
+    assertEquals("owen", leaf.getLiteral());
 
     assertEquals("(and (or leaf-0 (not leaf-1) leaf-2 (not leaf-3)" +
         " (not leaf-4) leaf-5 leaf-6 leaf-7)" +
@@ -965,19 +979,19 @@ public class TestSearchArgumentImpl {
     assertEquals(PredicateLeaf.Type.STRING, leaf.getType());
     assertEquals(PredicateLeaf.Operator.EQUALS, leaf.getOperator());
     assertEquals("first_name", leaf.getColumnName());
-    assertEquals(new Text("sue"), leaf.getLiteral());
+    assertEquals("sue", leaf.getLiteral());
 
     leaf = leaves.get(2);
     assertEquals(PredicateLeaf.Type.INTEGER, leaf.getType());
     assertEquals(PredicateLeaf.Operator.LESS_THAN, leaf.getOperator());
     assertEquals("id", leaf.getColumnName());
-    assertEquals(new LongWritable(12), leaf.getLiteral());
+    assertEquals(12L, leaf.getLiteral());
 
     leaf = leaves.get(3);
     assertEquals(PredicateLeaf.Type.INTEGER, leaf.getType());
     assertEquals(PredicateLeaf.Operator.LESS_THAN_EQUALS, leaf.getOperator());
     assertEquals("id", leaf.getColumnName());
-    assertEquals(new LongWritable(4), leaf.getLiteral());
+    assertEquals(4L, leaf.getLiteral());
 
     assertEquals("(or leaf-0 (not leaf-1) (not leaf-2) leaf-3)",
         sarg.getExpression().toString());
@@ -1385,20 +1399,20 @@ public class TestSearchArgumentImpl {
     assertEquals(PredicateLeaf.Operator.BETWEEN, leaf.getOperator());
     assertEquals("id", leaf.getColumnName());
     assertEquals(null, leaf.getLiteral());
-    assertEquals(new LongWritable(23), leaf.getLiteralList().get(0));
-    assertEquals(new LongWritable(45), leaf.getLiteralList().get(1));
+    assertEquals(23L, leaf.getLiteralList().get(0));
+    assertEquals(45L, leaf.getLiteralList().get(1));
 
     leaf = leaves.get(1);
     assertEquals(PredicateLeaf.Type.STRING, leaf.getType());
     assertEquals(PredicateLeaf.Operator.EQUALS, leaf.getOperator());
     assertEquals("first_name", leaf.getColumnName());
-    assertEquals(new Text("alan"), leaf.getLiteral());
+    assertEquals("alan", leaf.getLiteral());
 
     leaf = leaves.get(2);
     assertEquals(PredicateLeaf.Type.STRING, leaf.getType());
     assertEquals(PredicateLeaf.Operator.EQUALS, leaf.getOperator());
     assertEquals("last_name", leaf.getColumnName());
-    assertEquals(new Text("smith"), leaf.getLiteral());
+    assertEquals("smith", leaf.getLiteral());
 
     assertEquals("(and leaf-0 leaf-1 leaf-2)",
         sarg.getExpression().toString());
@@ -1595,21 +1609,21 @@ public class TestSearchArgumentImpl {
     assertEquals(PredicateLeaf.Type.INTEGER, leaf.getType());
     assertEquals(PredicateLeaf.Operator.EQUALS, leaf.getOperator());
     assertEquals("id", leaf.getColumnName());
-    assertEquals(new LongWritable(12), leaf.getLiteral());
+    assertEquals(12L, leaf.getLiteral());
 
     leaf = leaves.get(1);
     assertEquals(PredicateLeaf.Type.STRING, leaf.getType());
     assertEquals(PredicateLeaf.Operator.IN, leaf.getOperator());
     assertEquals("first_name", leaf.getColumnName());
-    assertEquals(new Text("john"), leaf.getLiteralList().get(0));
-    assertEquals(new Text("sue"), leaf.getLiteralList().get(1));
+    assertEquals("john", leaf.getLiteralList().get(0));
+    assertEquals("sue", leaf.getLiteralList().get(1));
 
     leaf = leaves.get(2);
     assertEquals(PredicateLeaf.Type.INTEGER, leaf.getType());
     assertEquals(PredicateLeaf.Operator.IN, leaf.getOperator());
     assertEquals("id", leaf.getColumnName());
-    assertEquals(new LongWritable(34), leaf.getLiteralList().get(0));
-    assertEquals(new LongWritable(50), leaf.getLiteralList().get(1));
+    assertEquals(34L, leaf.getLiteralList().get(0));
+    assertEquals(50L, leaf.getLiteralList().get(1));
 
     assertEquals("(and (not leaf-0) leaf-1 leaf-2)",
         sarg.getExpression().toString());
@@ -1854,8 +1868,8 @@ public class TestSearchArgumentImpl {
     assertEquals(PredicateLeaf.Operator.BETWEEN,
         leaves.get(0).getOperator());
     assertEquals("first_name", leaves.get(0).getColumnName());
-    assertEquals(new Text("david"), leaves.get(0).getLiteralList().get(0));
-    assertEquals(new Text("greg"), leaves.get(0).getLiteralList().get(1));
+    assertEquals("david", leaves.get(0).getLiteralList().get(0));
+    assertEquals("greg", leaves.get(0).getLiteralList().get(1));
 
     assertEquals("leaf-0",
         sarg.getExpression().toString());
@@ -2332,55 +2346,55 @@ public class TestSearchArgumentImpl {
     assertEquals(PredicateLeaf.Type.INTEGER, leaf.getType());
     assertEquals(PredicateLeaf.Operator.LESS_THAN, leaf.getOperator());
     assertEquals("id", leaf.getColumnName());
-    assertEquals(new LongWritable(18), leaf.getLiteral());
+    assertEquals(18L, leaf.getLiteral());
 
     leaf = leaves.get(1);
     assertEquals(PredicateLeaf.Type.INTEGER, leaf.getType());
     assertEquals(PredicateLeaf.Operator.LESS_THAN, leaf.getOperator());
     assertEquals("id", leaf.getColumnName());
-    assertEquals(new LongWritable(10), leaf.getLiteral());
+    assertEquals(10L, leaf.getLiteral());
 
     leaf = leaves.get(2);
     assertEquals(PredicateLeaf.Type.INTEGER, leaf.getType());
     assertEquals(PredicateLeaf.Operator.LESS_THAN, leaf.getOperator());
     assertEquals("id", leaf.getColumnName());
-    assertEquals(new LongWritable(13), leaf.getLiteral());
+    assertEquals(13L, leaf.getLiteral());
 
     leaf = leaves.get(3);
     assertEquals(PredicateLeaf.Type.INTEGER, leaf.getType());
     assertEquals(PredicateLeaf.Operator.LESS_THAN, leaf.getOperator());
     assertEquals("id", leaf.getColumnName());
-    assertEquals(new LongWritable(16), leaf.getLiteral());
+    assertEquals(16L, leaf.getLiteral());
 
     leaf = leaves.get(4);
     assertEquals(PredicateLeaf.Type.INTEGER, leaf.getType());
     assertEquals(PredicateLeaf.Operator.LESS_THAN, leaf.getOperator());
     assertEquals("id", leaf.getColumnName());
-    assertEquals(new LongWritable(11), leaf.getLiteral());
+    assertEquals(11L, leaf.getLiteral());
 
     leaf = leaves.get(5);
     assertEquals(PredicateLeaf.Type.INTEGER, leaf.getType());
     assertEquals(PredicateLeaf.Operator.LESS_THAN, leaf.getOperator());
     assertEquals("id", leaf.getColumnName());
-    assertEquals(new LongWritable(12), leaf.getLiteral());
+    assertEquals(12L, leaf.getLiteral());
 
     leaf = leaves.get(6);
     assertEquals(PredicateLeaf.Type.INTEGER, leaf.getType());
     assertEquals(PredicateLeaf.Operator.LESS_THAN, leaf.getOperator());
     assertEquals("id", leaf.getColumnName());
-    assertEquals(new LongWritable(14), leaf.getLiteral());
+    assertEquals(14L, leaf.getLiteral());
 
     leaf = leaves.get(7);
     assertEquals(PredicateLeaf.Type.INTEGER, leaf.getType());
     assertEquals(PredicateLeaf.Operator.LESS_THAN, leaf.getOperator());
     assertEquals("id", leaf.getColumnName());
-    assertEquals(new LongWritable(15), leaf.getLiteral());
+    assertEquals(15L, leaf.getLiteral());
 
     leaf = leaves.get(8);
     assertEquals(PredicateLeaf.Type.INTEGER, leaf.getType());
     assertEquals(PredicateLeaf.Operator.LESS_THAN, leaf.getOperator());
     assertEquals("id", leaf.getColumnName());
-    assertEquals(new LongWritable(17), leaf.getLiteral());
+    assertEquals(17L, leaf.getLiteral());
 
     assertEquals("(and" +
         " (or leaf-0 leaf-1 leaf-2 leaf-3)" +
@@ -2727,7 +2741,7 @@ public class TestSearchArgumentImpl {
     assertEquals(PredicateLeaf.Operator.LESS_THAN,
         leaves.get(0).getOperator());
     assertEquals("id", leaves.get(0).getColumnName());
-    assertEquals(new LongWritable(10), leaves.get(0).getLiteral());
+    assertEquals(10L, leaves.get(0).getLiteral());
 
     assertEquals("(and (not leaf-0) (not leaf-0))",
         sarg.getExpression().toString());

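Note: the assertions above track an API change in SearchArgument: PredicateLeaf.getLiteral() and getLiteralList() now hand back plain Java values (Long, String) instead of Hadoop Writables (LongWritable, Text). For orientation, leaves like these are what predicate pushdown produces from a WHERE clause; a filter of roughly this shape would yield the three leaves checked above (the table name is hypothetical, the column names come from the test):

  -- hypothetical table; (and (not leaf-0) leaf-1 leaf-2) corresponds to:
  SELECT id, first_name
  FROM employee
  WHERE id <> 12                        -- not leaf-0 (EQUALS id 12)
    AND first_name IN ('john', 'sue')   -- leaf-1 (IN on first_name)
    AND id IN (34, 50);                 -- leaf-2 (IN on id)
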
Modified: hive/branches/vectorization/ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFDateAdd.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFDateAdd.java?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFDateAdd.java (original)
+++ hive/branches/vectorization/ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFDateAdd.java Thu Sep 12 01:21:10 2013
@@ -18,12 +18,12 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
-import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.Text;
+import java.util.TimeZone;
 
 import junit.framework.TestCase;
 
-import java.util.TimeZone;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.Text;
 
 /**
  * JUnit test for UDFDateAdd.
@@ -37,7 +37,7 @@ public class TestUDFDateAdd extends Test
      * on 2009-10-31.
      */
     public void testFallBack() throws Exception {
-        // set the default time zone so that the dates cover 
+        // set the default time zone so that the dates cover
         // the zone's daylight saving time adjustment (2009-10-31)
         // from daylight to standard time
         TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"));
@@ -55,7 +55,7 @@ public class TestUDFDateAdd extends Test
      * on 2010-03-14.
      */
     public void testSpringAhead() throws Exception {
-        // set the default time zone so that the dates cover 
+        // set the default time zone so that the dates cover
         // the zone's daylight saving time adjustment (2010-03-14)
         // from standard to daylight time
         TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"));

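Note: both test methods pin the JVM default time zone before exercising date_add because the daylight-saving transitions are the point of the tests: an implementation that adds days as fixed 24-hour blocks of milliseconds drifts by an hour at the transition and can land on the wrong calendar date. At the query level the expectation looks like this (a sketch, not taken from the test; dates chosen to straddle the 2010-03-14 spring-ahead):

  -- date arithmetic should step whole calendar days across a DST change
  SELECT date_add('2010-03-13', 2) FROM src LIMIT 1;  -- expect 2010-03-15
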
Modified: hive/branches/vectorization/ql/src/test/queries/clientnegative/ptf_negative_HavingLeadWithNoGBYNoWindowing.q
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/queries/clientnegative/ptf_negative_HavingLeadWithNoGBYNoWindowing.q?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/queries/clientnegative/ptf_negative_HavingLeadWithNoGBYNoWindowing.q (original)
+++ hive/branches/vectorization/ql/src/test/queries/clientnegative/ptf_negative_HavingLeadWithNoGBYNoWindowing.q Thu Sep 12 01:21:10 2013
@@ -12,11 +12,9 @@ CREATE TABLE part( 
     p_comment STRING
 );
 
-LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part;
-
 -- testHavingLeadWithNoGBYNoWindowing
 select  p_mfgr,p_name, p_size 
 from part 
-having lead(p_size, 1) <= p_size 
+having lead(p_size, 1) over() <= p_size 
 distribute by p_mfgr 
 sort by p_name;

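Note: the over() additions here and in the next two negative tests (ptf_negative_HavingLeadWithPTF.q, ptf_negative_WhereWithRankCond.q) keep each test failing for its intended reason: windowing functions need an explicit OVER clause, and without one the parser would reject the query before the HAVING/WHERE misuse under test is ever reached. For contrast, the legal placement is in the select list:

  -- lead() carries an OVER clause and lives in the select list,
  -- not in HAVING or WHERE
  SELECT p_mfgr, p_name, p_size,
         lead(p_size, 1) OVER (PARTITION BY p_mfgr ORDER BY p_name) AS next_size
  FROM part;
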
Modified: hive/branches/vectorization/ql/src/test/queries/clientnegative/ptf_negative_HavingLeadWithPTF.q
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/queries/clientnegative/ptf_negative_HavingLeadWithPTF.q?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/queries/clientnegative/ptf_negative_HavingLeadWithPTF.q (original)
+++ hive/branches/vectorization/ql/src/test/queries/clientnegative/ptf_negative_HavingLeadWithPTF.q Thu Sep 12 01:21:10 2013
@@ -17,6 +17,6 @@ select  p_mfgr,p_name, p_size 
 from noop(on part 
 partition by p_mfgr 
 order by p_name) 
-having lead(p_size, 1) <= p_size 
+having lead(p_size, 1) over() <= p_size 
 distribute by p_mfgr 
 sort by p_name;   

Modified: hive/branches/vectorization/ql/src/test/queries/clientnegative/ptf_negative_InvalidValueBoundary.q
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/queries/clientnegative/ptf_negative_InvalidValueBoundary.q?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/queries/clientnegative/ptf_negative_InvalidValueBoundary.q (original)
+++ hive/branches/vectorization/ql/src/test/queries/clientnegative/ptf_negative_InvalidValueBoundary.q Thu Sep 12 01:21:10 2013
@@ -16,6 +16,6 @@ CREATE TABLE part( 
 -- testInvalidValueBoundary
 select  p_mfgr,p_name, p_size,   
 sum(p_size) over (w1) as s ,    
-dense_rank() as dr  
+dense_rank() over(w1) as dr  
 from part  
 window w1 as (partition by p_mfgr order by p_complex range between  2 preceding and current row);

Modified: hive/branches/vectorization/ql/src/test/queries/clientnegative/ptf_negative_WhereWithRankCond.q
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/queries/clientnegative/ptf_negative_WhereWithRankCond.q?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/queries/clientnegative/ptf_negative_WhereWithRankCond.q (original)
+++ hive/branches/vectorization/ql/src/test/queries/clientnegative/ptf_negative_WhereWithRankCond.q Thu Sep 12 01:21:10 2013
@@ -12,11 +12,9 @@ CREATE TABLE part( 
     p_comment STRING
 );
 
-LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part;
-
 -- testWhereWithRankCond
 select  p_mfgr,p_name, p_size, 
-rank() as r 
+rank() over() as r 
 from part 
 where r < 4 
 distribute by p_mfgr 

Modified: hive/branches/vectorization/ql/src/test/queries/clientpositive/avro_partitioned.q
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/queries/clientpositive/avro_partitioned.q?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/queries/clientpositive/avro_partitioned.q (original)
+++ hive/branches/vectorization/ql/src/test/queries/clientpositive/avro_partitioned.q Thu Sep 12 01:21:10 2013
@@ -64,3 +64,10 @@ SET hive.exec.dynamic.partition.mode=non
 INSERT OVERWRITE TABLE episodes_partitioned PARTITION (doctor_pt) SELECT title, air_date, doctor, doctor as doctor_pt FROM episodes;
 
 SELECT * FROM episodes_partitioned WHERE doctor_pt > 6 ORDER BY air_date;
+
+-- Verify that Fetch works in addition to Map
+SELECT * FROM episodes_partitioned LIMIT 5;
+-- Fetch w/filter to specific partition
+SELECT * FROM episodes_partitioned WHERE doctor_pt = 6;
+-- Fetch w/non-existent partition
+SELECT * FROM episodes_partitioned WHERE doctor_pt = 7 LIMIT 5;

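Note: the three added SELECTs make the golden file cover the fetch path (the client reading partition data directly) alongside the usual map-side path. Simple select-star queries like these can be served by a fetch task instead of a MapReduce job; assuming the hive.fetch.task.conversion knob of this era, the conversion can also be requested explicitly:

  -- sketch: 'more' converts simple filter/limit queries to fetch tasks
  SET hive.fetch.task.conversion=more;
  SELECT * FROM episodes_partitioned WHERE doctor_pt = 6;
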
Modified: hive/branches/vectorization/ql/src/test/queries/clientpositive/create_udaf.q
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/queries/clientpositive/create_udaf.q?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/queries/clientpositive/create_udaf.q (original)
+++ hive/branches/vectorization/ql/src/test/queries/clientpositive/create_udaf.q Thu Sep 12 01:21:10 2013
@@ -9,4 +9,11 @@ FROM src INSERT OVERWRITE TABLE dest1 SE
 
 SELECT dest1.* FROM dest1;
 
+-- cover all the other value types:
+SELECT test_max(CAST(length(src.value) AS SMALLINT)) FROM src;
+SELECT test_max(CAST(length(src.value) AS BIGINT)) FROM src;
+SELECT test_max(CAST(length(src.value) AS DOUBLE)) FROM src;
+SELECT test_max(CAST(length(src.value) AS FLOAT)) FROM src;
+SELECT test_max(substr(src.value,5)) FROM src;
+
 DROP TEMPORARY FUNCTION test_max;

Modified: hive/branches/vectorization/ql/src/test/queries/clientpositive/ctas_colname.q
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/queries/clientpositive/ctas_colname.q?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/queries/clientpositive/ctas_colname.q (original)
+++ hive/branches/vectorization/ql/src/test/queries/clientpositive/ctas_colname.q Thu Sep 12 01:21:10 2013
@@ -16,10 +16,10 @@ describe formatted x4;
 select * from x4 order by key, value, rr;
 
 explain
-create table x5 as select *, lead(key,1) over(partition by key order by value) from src limit 20;
-create table x5 as select *, lead(key,1) over(partition by key order by value) from src limit 20;
+create table x5 as select *, lead(key,1) over(partition by key order by value) as lead1 from src limit 20;
+create table x5 as select *, lead(key,1) over(partition by key order by value) as lead1 from src limit 20;
 describe formatted x5;
-select * from x5 order by key, value, tok_windowspec;
+select * from x5 order by key, value, lead1;
 
 -- sub queries
 explain

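Note: the fix above gives the windowing expression an explicit alias. Without one, CTAS has only an internal token name to use for the new column (the old golden file ordered by tok_windowspec), which is neither stable nor readable. The canonical shape:

  -- every windowing expression in a CTAS select list should carry an alias
  CREATE TABLE x5 AS
  SELECT *, lead(key, 1) OVER (PARTITION BY key ORDER BY value) AS lead1
  FROM src LIMIT 20;
  -- the alias makes the column addressable afterwards:
  SELECT * FROM x5 ORDER BY key, value, lead1;
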
Modified: hive/branches/vectorization/ql/src/test/queries/clientpositive/list_bucket_dml_6.q
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/queries/clientpositive/list_bucket_dml_6.q?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/queries/clientpositive/list_bucket_dml_6.q (original)
+++ hive/branches/vectorization/ql/src/test/queries/clientpositive/list_bucket_dml_6.q Thu Sep 12 01:21:10 2013
@@ -90,7 +90,7 @@ set hive.optimize.listbucketing=true;
 explain extended
 select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484';
 select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484';
-select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484';
+select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484' order by key, value, ds, hr;
 
 -- clean up
 drop table list_bucketing_dynamic_part;

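Note: srcpart contains two hour partitions (hr=11 and hr=12) for each ds, so without a total order the row order of this SELECT, and hence the golden output, depends on partition enumeration order. Adding ORDER BY pins it down; list_bucket_dml_7.q, list_bucket_dml_8.q and list_bucket_query_multiskew_2.q below get the same treatment:

  -- deterministic golden output: impose a total order on the result
  SELECT * FROM srcpart
  WHERE ds = '2008-04-08' AND key = '484' AND value = 'val_484'
  ORDER BY key, value, ds, hr;
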
Modified: hive/branches/vectorization/ql/src/test/queries/clientpositive/list_bucket_dml_7.q
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/queries/clientpositive/list_bucket_dml_7.q?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/queries/clientpositive/list_bucket_dml_7.q (original)
+++ hive/branches/vectorization/ql/src/test/queries/clientpositive/list_bucket_dml_7.q Thu Sep 12 01:21:10 2013
@@ -64,7 +64,7 @@ set hive.input.format=org.apache.hadoop.
 explain extended
 select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484';
 select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484';
-select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484';
+select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484' order by hr;
 
 -- clean up
 drop table list_bucketing_dynamic_part;

Modified: hive/branches/vectorization/ql/src/test/queries/clientpositive/list_bucket_dml_8.q
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/queries/clientpositive/list_bucket_dml_8.q?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/queries/clientpositive/list_bucket_dml_8.q (original)
+++ hive/branches/vectorization/ql/src/test/queries/clientpositive/list_bucket_dml_8.q Thu Sep 12 01:21:10 2013
@@ -81,7 +81,7 @@ select count(*) from list_bucketing_dyna
 explain extended
 select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484';
 select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484';
-select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484';
+select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484' order by hr;
 
 -- clean up
 drop table list_bucketing_dynamic_part;

Modified: hive/branches/vectorization/ql/src/test/queries/clientpositive/list_bucket_query_multiskew_2.q
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/queries/clientpositive/list_bucket_query_multiskew_2.q?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/queries/clientpositive/list_bucket_query_multiskew_2.q (original)
+++ hive/branches/vectorization/ql/src/test/queries/clientpositive/list_bucket_query_multiskew_2.q Thu Sep 12 01:21:10 2013
@@ -30,21 +30,21 @@ SELECT count(1) FROM fact_daily WHERE ds
 
 -- the pruner picks up only the default directory
 -- the explain plan shows which directory is selected: Truncated Path -> Alias
-explain extended SELECT key, value FROM fact_daily WHERE ds='1' and hr='4' and value= 'val_484';
+explain extended SELECT key, value FROM fact_daily WHERE ds='1' and hr='4' and value= 'val_484' ORDER BY key, value;
 -- List Bucketing Query
-SELECT key, value FROM fact_daily WHERE ds='1' and hr='4' and value= 'val_484';
+SELECT key, value FROM fact_daily WHERE ds='1' and hr='4' and value= 'val_484' ORDER BY key, value;
 
 -- the pruner picks up only the default directory
 -- the explain plan shows which directory is selected: Truncated Path -> Alias
-explain extended SELECT key FROM fact_daily WHERE ds='1' and hr='4' and key= '406';
+explain extended SELECT key FROM fact_daily WHERE ds='1' and hr='4' and key= '406' ORDER BY key;
 -- List Bucketing Query
-SELECT key, value FROM fact_daily WHERE ds='1' and hr='4' and key= '406';
+SELECT key, value FROM fact_daily WHERE ds='1' and hr='4' and key= '406' ORDER BY key;
 
 -- the pruner picks up only the skewed-value directory
 -- the explain plan shows which directory is selected: Truncated Path -> Alias
-explain extended SELECT key, value FROM fact_daily WHERE ds='1' and hr='4' and ( (key='484' and value ='val_484')  or (key='238' and value= 'val_238')) ;
+explain extended SELECT key, value FROM fact_daily WHERE ds='1' and hr='4' and ( (key='484' and value ='val_484')  or (key='238' and value= 'val_238')) ORDER BY key, value;
 -- List Bucketing Query
-SELECT key, value FROM fact_daily WHERE ds='1' and hr='4' and ( (key='484' and value ='val_484')  or (key='238' and value= 'val_238')) ;
+SELECT key, value FROM fact_daily WHERE ds='1' and hr='4' and ( (key='484' and value ='val_484')  or (key='238' and value= 'val_238')) ORDER BY key, value;
 
 -- clean up
-drop table fact_daily;
\ No newline at end of file
+drop table fact_daily;

Modified: hive/branches/vectorization/ql/src/test/queries/clientpositive/orc_dictionary_threshold.q
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/queries/clientpositive/orc_dictionary_threshold.q?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/queries/clientpositive/orc_dictionary_threshold.q (original)
+++ hive/branches/vectorization/ql/src/test/queries/clientpositive/orc_dictionary_threshold.q Thu Sep 12 01:21:10 2013
@@ -9,7 +9,8 @@ ROW FORMAT SERDE 'org.apache.hadoop.hive
 STORED AS INPUTFORMAT 'org.apache.hadoop.hive.ql.io.orc.OrcInputFormat' 
 OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat';
 
-INSERT OVERWRITE TABLE test_orc SELECT key FROM src limit 10;
+-- should be a single split
+INSERT OVERWRITE TABLE test_orc SELECT key FROM src TABLESAMPLE (10 ROWS);
 
 -- Test reading the column back
 

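Note: LIMIT 10 over src gives no guarantee about which ten rows arrive, so the table contents feeding the dictionary-threshold check were not reproducible across runs. TABLESAMPLE (n ROWS) instead takes the first n rows of a single input split, which is stable; split_sample.q below exercises the same construct. Generic usage:

  -- row sampling is usable anywhere a table reference is
  SELECT * FROM src TABLESAMPLE (10 ROWS) s;
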
Modified: hive/branches/vectorization/ql/src/test/queries/clientpositive/ptf.q
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/queries/clientpositive/ptf.q?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/queries/clientpositive/ptf.q (original)
+++ hive/branches/vectorization/ql/src/test/queries/clientpositive/ptf.q Thu Sep 12 01:21:10 2013
@@ -196,7 +196,7 @@ rank() over (distribute by p_mfgr sort b
 dense_rank() over (distribute by p_mfgr sort by p_name) as dr, 
 sum(p_retailprice) over (distribute by p_mfgr sort by p_name rows between unbounded preceding and current row)  as s  
 INSERT OVERWRITE TABLE part_5 select  p_mfgr,p_name, p_size,  
-round(sum(p_size),1) over (distribute by p_mfgr sort by p_size range between 5 preceding and current row) as s2,
+round(sum(p_size) over (distribute by p_mfgr sort by p_size range between 5 preceding and current row),1) as s2,
 rank() over (distribute by p_mfgr sort by p_mfgr, p_name) as r, 
 dense_rank() over (distribute by p_mfgr sort by p_mfgr, p_name) as dr, 
 cume_dist() over (distribute by p_mfgr sort by p_mfgr, p_name) as cud, 

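Note: this rewrite reflects the rule that OVER attaches only to windowing-capable functions (aggregates, the rank family, lead/lag); round() is an ordinary scalar UDF, so the window aggregate is computed first and round() wraps its per-row result. The same inside-out rewrite appears in windowing.q and windowing_expressions.q below:

  -- invalid: round(sum(p_size), 1) OVER (...)   (OVER on a scalar UDF)
  -- valid: evaluate the window, then apply the scalar function
  SELECT p_mfgr, p_name, p_size,
         round(sum(p_size) OVER (DISTRIBUTE BY p_mfgr SORT BY p_size
               RANGE BETWEEN 5 PRECEDING AND CURRENT ROW), 1) AS s2
  FROM part;
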
Modified: hive/branches/vectorization/ql/src/test/queries/clientpositive/split_sample.q
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/queries/clientpositive/split_sample.q?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/queries/clientpositive/split_sample.q (original)
+++ hive/branches/vectorization/ql/src/test/queries/clientpositive/split_sample.q Thu Sep 12 01:21:10 2013
@@ -110,3 +110,6 @@ select key from ss_src2 tablesample(10 R
 set hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat;
 -- ROW-type sampling works with other input formats (the other sampling types don't)
 select count(1) from ss_src2 tablesample(10 ROWS);
+
+-- HIVE-5061: row sampling in a sub-query
+select * from (select * from src TABLESAMPLE (1 ROWS)) x;

Modified: hive/branches/vectorization/ql/src/test/queries/clientpositive/udf4.q
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/queries/clientpositive/udf4.q?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/queries/clientpositive/udf4.q (original)
+++ hive/branches/vectorization/ql/src/test/queries/clientpositive/udf4.q Thu Sep 12 01:21:10 2013
@@ -3,6 +3,49 @@ CREATE TABLE dest1(c1 STRING) STORED AS 
 FROM src INSERT OVERWRITE TABLE dest1 SELECT '  abc  ' WHERE src.key = 86;
 
 EXPLAIN
-SELECT round(1.0), round(1.5), round(-1.5), floor(1.0), floor(1.5), floor(-1.5), sqrt(1.0), sqrt(-1.0), sqrt(0.0), ceil(1.0), ceil(1.5), ceil(-1.5), ceiling(1.0), rand(3), +3, -3, 1++2, 1+-2, ~1 FROM dest1;
+SELECT round(1.0), round(1.5), round(-1.5), floor(1.0), floor(1.5), floor(-1.5), sqrt(1.0), sqrt(-1.0), sqrt(0.0), ceil(1.0), ceil(1.5), ceil(-1.5), ceiling(1.0), rand(3), +3, -3, 1++2, 1+-2, 
 
-SELECT round(1.0), round(1.5), round(-1.5), floor(1.0), floor(1.5), floor(-1.5), sqrt(1.0), sqrt(-1.0), sqrt(0.0), ceil(1.0), ceil(1.5), ceil(-1.5), ceiling(1.0), rand(3), +3, -3, 1++2, 1+-2, ~1 FROM dest1;
+~1, 
+~CAST(1 AS TINYINT), 
+~CAST(1 AS SMALLINT), 
+~CAST(1 AS BIGINT), 
+
+CAST(1 AS TINYINT) & CAST(2 AS TINYINT), 
+CAST(1 AS SMALLINT) & CAST(2 AS SMALLINT), 
+1 & 2, 
+CAST(1 AS BIGINT) & CAST(2 AS BIGINT),
+
+CAST(1 AS TINYINT) | CAST(2 AS TINYINT),
+CAST(1 AS SMALLINT) | CAST(2 AS SMALLINT),
+1 | 2,
+CAST(1 AS BIGINT) | CAST(2 AS BIGINT),
+
+CAST(1 AS TINYINT) ^ CAST(3 AS TINYINT),
+CAST(1 AS SMALLINT) ^ CAST(3 AS SMALLINT),
+1 ^ 3,
+CAST(1 AS BIGINT) ^ CAST(3 AS BIGINT)
+
+FROM dest1;
+
+SELECT round(1.0), round(1.5), round(-1.5), floor(1.0), floor(1.5), floor(-1.5), sqrt(1.0), sqrt(-1.0), sqrt(0.0), ceil(1.0), ceil(1.5), ceil(-1.5), ceiling(1.0), rand(3), +3, -3, 1++2, 1+-2, 
+~1, 
+~CAST(1 AS TINYINT), 
+~CAST(1 AS SMALLINT), 
+~CAST(1 AS BIGINT), 
+
+CAST(1 AS TINYINT) & CAST(2 AS TINYINT), 
+CAST(1 AS SMALLINT) & CAST(2 AS SMALLINT), 
+1 & 2, 
+CAST(1 AS BIGINT) & CAST(2 AS BIGINT),
+
+CAST(1 AS TINYINT) | CAST(2 AS TINYINT), 
+CAST(1 AS SMALLINT) | CAST(2 AS SMALLINT), 
+1 | 2, 
+CAST(1 AS BIGINT) | CAST(2 AS BIGINT),
+
+CAST(1 AS TINYINT) ^ CAST(3 AS TINYINT),
+CAST(1 AS SMALLINT) ^ CAST(3 AS SMALLINT),
+1 ^ 3,
+CAST(1 AS BIGINT) ^ CAST(3 AS BIGINT)
+ 
+FROM dest1;

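Note: the expanded query checks that the unary and binary bitwise operators are defined for every integral width (TINYINT, SMALLINT, INT, BIGINT), not just the plain INT literals. The expected two's-complement arithmetic, for reference:

  SELECT ~1,     -- -2  (flip all bits of ...0001)
         1 & 2,  --  0  (0001 AND 0010)
         1 | 2,  --  3  (0001 OR  0010)
         1 ^ 3   --  2  (0001 XOR 0011)
  FROM dest1;
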
Modified: hive/branches/vectorization/ql/src/test/queries/clientpositive/udf_pmod.q
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/queries/clientpositive/udf_pmod.q?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/queries/clientpositive/udf_pmod.q (original)
+++ hive/branches/vectorization/ql/src/test/queries/clientpositive/udf_pmod.q Thu Sep 12 01:21:10 2013
@@ -9,3 +9,12 @@ FROM src LIMIT 1;
 
 SELECT pmod(100,19), pmod(50,125), pmod(300,15)
 FROM src LIMIT 1;
+
+SELECT pmod(CAST(-100 AS TINYINT),CAST(9 AS TINYINT)), pmod(CAST(-50 AS TINYINT),CAST(101 AS TINYINT)), pmod(CAST(-100 AS TINYINT),CAST(29 AS TINYINT)) FROM src LIMIT 1;
+SELECT pmod(CAST(-100 AS SMALLINT),CAST(9 AS SMALLINT)), pmod(CAST(-50 AS SMALLINT),CAST(101 AS SMALLINT)), pmod(CAST(-100 AS SMALLINT),CAST(29 AS SMALLINT)) FROM src LIMIT 1;
+SELECT pmod(CAST(-100 AS BIGINT),CAST(9 AS BIGINT)), pmod(CAST(-50 AS BIGINT),CAST(101 AS BIGINT)), pmod(CAST(-100 AS BIGINT),CAST(29 AS BIGINT)) FROM src LIMIT 1;
+
+SELECT pmod(CAST(-100.91 AS FLOAT),CAST(9.8 AS FLOAT)), pmod(CAST(-50.1 AS FLOAT),CAST(101.8 AS FLOAT)), pmod(CAST(-100.91 AS FLOAT),CAST(29.75 AS FLOAT)) FROM src LIMIT 1;
+SELECT pmod(CAST(-100.91 AS DOUBLE),CAST(9.8 AS DOUBLE)), pmod(CAST(-50.1 AS DOUBLE),CAST(101.8 AS DOUBLE)), pmod(CAST(-100.91 AS DOUBLE),CAST(29.75 AS DOUBLE)) FROM src LIMIT 1;
+SELECT pmod(CAST(-100.91 AS DECIMAL),CAST(9.8 AS DECIMAL)), pmod(CAST(-50.1 AS DECIMAL),CAST(101.8 AS DECIMAL)), pmod(CAST(-100.91 AS DECIMAL),CAST(29.75 AS DECIMAL)) FROM src LIMIT 1;
+

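Note: pmod is the positive modulus, pmod(a, b) = ((a % b) + b) % b, so for positive b the result always lands in [0, b) even when a is negative, unlike Hive's %, which follows Java and keeps the sign of a. Worked integer cases matching the new lines:

  SELECT pmod(-100, 9),    -- -100 % 9   =  -1, so pmod gives -1 + 9 = 8
         pmod(-50, 101),   --  -50 % 101 = -50, so pmod gives 51
         pmod(-100, 29)    -- -100 % 29  = -13, so pmod gives 16
  FROM src LIMIT 1;
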
Modified: hive/branches/vectorization/ql/src/test/queries/clientpositive/windowing.q
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/queries/clientpositive/windowing.q?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/queries/clientpositive/windowing.q (original)
+++ hive/branches/vectorization/ql/src/test/queries/clientpositive/windowing.q Thu Sep 12 01:21:10 2013
@@ -218,14 +218,14 @@ group by p_mfgr, p_brand;
 select * 
 from (
 select p_mfgr, p_brand, s, 
-round(sum(s),2) over w1  as s1
+round(sum(s) over w1 , 2)  as s1
 from mfgr_price_view 
 window w1 as (distribute by p_mfgr sort by p_mfgr )
 ) sq
 order by p_mfgr, p_brand;
 
 select p_mfgr, p_brand, s, 
-round(sum(s),2) over w1  as s1
+round(sum(s) over w1 ,2)  as s1
 from mfgr_price_view 
 window w1 as (distribute by p_mfgr sort by p_brand rows between 2 preceding and current row);
 
@@ -283,7 +283,7 @@ select  p_mfgr,p_name, p_size,  
 rank() over(distribute by p_mfgr sort by p_name) as r, 
 dense_rank() over(distribute by p_mfgr sort by p_name) as dr, 
 cume_dist() over(distribute by p_mfgr sort by p_name) as cud, 
-round(sum(p_size),1) over (distribute by p_mfgr sort by p_size range between 5 preceding and current row) as s2, 
+round(sum(p_size) over (distribute by p_mfgr sort by p_size range between 5 preceding and current row),1) as s2, 
 first_value(p_size) over w1  as fv1
 window w1 as (distribute by p_mfgr sort by p_mfgr, p_name rows between 2 preceding and 2 following) 
 INSERT OVERWRITE TABLE part_3 
@@ -387,10 +387,10 @@ from part;    
 
 -- 37. testPartitioningVariousForms
 select p_mfgr,
-round(sum(p_retailprice),2) over (partition by p_mfgr order by p_mfgr) as s1,
+round(sum(p_retailprice) over (partition by p_mfgr order by p_mfgr),2) as s1,
 min(p_retailprice) over (partition by p_mfgr) as s2,
 max(p_retailprice) over (distribute by p_mfgr sort by p_mfgr) as s3,
-round(avg(p_retailprice),2) over (distribute by p_mfgr) as s4,
+round(avg(p_retailprice) over (distribute by p_mfgr),2) as s4,
 count(p_retailprice) over (cluster by p_mfgr ) as s5
 from part;
 
@@ -428,7 +428,7 @@ select p_mfgr, p_name, p_size,
         
 -- 44. testOverNoPartitionSingleAggregate
 select p_name, p_retailprice,
-round(avg(p_retailprice),2) over()
+round(avg(p_retailprice) over(),2)
 from part
 order by p_name;
         

Modified: hive/branches/vectorization/ql/src/test/queries/clientpositive/windowing_expressions.q
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/queries/clientpositive/windowing_expressions.q?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/queries/clientpositive/windowing_expressions.q (original)
+++ hive/branches/vectorization/ql/src/test/queries/clientpositive/windowing_expressions.q Thu Sep 12 01:21:10 2013
@@ -35,17 +35,15 @@ create table over10k(
 load data local inpath '../data/files/over10k' into table over10k;
 
 select p_mfgr, p_retailprice, p_size,
-round(sum(p_retailprice),2) = round(sum(lag(p_retailprice,1,0.0)) + last_value(p_retailprice),2) 
-  over(distribute by p_mfgr sort by p_retailprice),
-max(p_retailprice) - min(p_retailprice) = last_value(p_retailprice) - first_value(p_retailprice)
-  over(distribute by p_mfgr sort by p_retailprice)
+round(sum(p_retailprice) over w1 , 2) = round(sum(lag(p_retailprice,1,0.0)) over w1 + last_value(p_retailprice) over w1 , 2), 
+max(p_retailprice) over w1 - min(p_retailprice) over w1 = last_value(p_retailprice) over w1 - first_value(p_retailprice) over w1
 from part
+window w1 as (distribute by p_mfgr sort by p_retailprice)
 ;
-
 select p_mfgr, p_retailprice, p_size,
 rank() over (distribute by p_mfgr sort by p_retailprice) as r,
 sum(p_retailprice) over (distribute by p_mfgr sort by p_retailprice rows between unbounded preceding and current row) as s2,
-sum(p_retailprice) - 5 over (distribute by p_mfgr sort by p_retailprice rows between unbounded preceding and current row) as s1
+sum(p_retailprice) over (distribute by p_mfgr sort by p_retailprice rows between unbounded preceding and current row) -5 as s1
 from part
 ;
 
@@ -66,7 +64,7 @@ select * from t1 limit 3;
 select * from t2 limit 3;
 
 select p_mfgr, p_retailprice, p_size,
-round(sum(p_retailprice),2) + 50.0 = round(sum(lag(p_retailprice,1,50.0)) + last_value(p_retailprice),2) 
-  over(distribute by p_mfgr sort by p_retailprice)
+round(sum(p_retailprice) over w1 , 2) + 50.0 = round(sum(lag(p_retailprice,1,50.0)) over w1 + (last_value(p_retailprice) over w1),2)
 from part
+window w1 as (distribute by p_mfgr sort by p_retailprice)
 limit 11;

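Note: two things happen in this rewrite: the OVER clauses are detached from places they cannot legally sit (a comparison or arithmetic over two expressions) and reattached to each aggregate call, and the shared window specification is factored into a named window so it is written once. The minimal shape of the WINDOW clause:

  SELECT p_mfgr, p_retailprice,
         sum(p_retailprice) OVER w1 AS running_sum,
         max(p_retailprice) OVER w1 AS running_max
  FROM part
  WINDOW w1 AS (DISTRIBUTE BY p_mfgr SORT BY p_retailprice);
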
Modified: hive/branches/vectorization/ql/src/test/queries/clientpositive/windowing_windowspec.q
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/queries/clientpositive/windowing_windowspec.q?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/queries/clientpositive/windowing_windowspec.q (original)
+++ hive/branches/vectorization/ql/src/test/queries/clientpositive/windowing_windowspec.q Thu Sep 12 01:21:10 2013
@@ -31,4 +31,6 @@ select s, sum(i) over(partition by ts or
 
 select f, sum(f) over (partition by ts order by f range between unbounded preceding and current row) from over10k limit 100;
 
+select s, i, round(avg(d) over (partition by s order by i) / 10.0 , 2) from over10k limit 7;
 
+select s, i, round((avg(d) over  w1 + 10.0) - (avg(d) over w1 - 10.0),2) from over10k window w1 as (partition by s order by i) limit 7;

Modified: hive/branches/vectorization/ql/src/test/resources/orc-file-dump.out
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/resources/orc-file-dump.out?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/resources/orc-file-dump.out (original)
+++ hive/branches/vectorization/ql/src/test/resources/orc-file-dump.out Thu Sep 12 01:21:10 2013
@@ -53,31 +53,31 @@ Stripes:
     Encoding column 1: DIRECT_V2
     Encoding column 2: DIRECT_V2
     Encoding column 3: DICTIONARY_V2
-  Stripe: offset: 191873 data: 63796 rows: 5000 tail: 74 index: 119
-    Stream: column 0 section ROW_INDEX start: 191873 length 10
-    Stream: column 1 section ROW_INDEX start: 191883 length 35
-    Stream: column 2 section ROW_INDEX start: 191918 length 39
-    Stream: column 3 section ROW_INDEX start: 191957 length 35
-    Stream: column 1 section DATA start: 191992 length 20029
-    Stream: column 2 section DATA start: 212021 length 40035
-    Stream: column 3 section DATA start: 252056 length 3574
-    Stream: column 3 section LENGTH start: 255630 length 25
-    Stream: column 3 section DICTIONARY_DATA start: 255655 length 133
+  Stripe: offset: 200000 data: 63796 rows: 5000 tail: 74 index: 119
+    Stream: column 0 section ROW_INDEX start: 200000 length 10
+    Stream: column 1 section ROW_INDEX start: 200010 length 35
+    Stream: column 2 section ROW_INDEX start: 200045 length 39
+    Stream: column 3 section ROW_INDEX start: 200084 length 35
+    Stream: column 1 section DATA start: 200119 length 20029
+    Stream: column 2 section DATA start: 220148 length 40035
+    Stream: column 3 section DATA start: 260183 length 3574
+    Stream: column 3 section LENGTH start: 263757 length 25
+    Stream: column 3 section DICTIONARY_DATA start: 263782 length 133
     Encoding column 0: DIRECT
     Encoding column 1: DIRECT_V2
     Encoding column 2: DIRECT_V2
     Encoding column 3: DICTIONARY_V2
-  Stripe: offset: 255862 data: 12940 rows: 1000 tail: 71 index: 120
-    Stream: column 0 section ROW_INDEX start: 255862 length 10
-    Stream: column 1 section ROW_INDEX start: 255872 length 36
-    Stream: column 2 section ROW_INDEX start: 255908 length 39
-    Stream: column 3 section ROW_INDEX start: 255947 length 35
-    Stream: column 1 section DATA start: 255982 length 4007
-    Stream: column 2 section DATA start: 259989 length 8007
-    Stream: column 3 section DATA start: 267996 length 768
-    Stream: column 3 section LENGTH start: 268764 length 25
-    Stream: column 3 section DICTIONARY_DATA start: 268789 length 133
+  Stripe: offset: 263989 data: 12940 rows: 1000 tail: 71 index: 120
+    Stream: column 0 section ROW_INDEX start: 263989 length 10
+    Stream: column 1 section ROW_INDEX start: 263999 length 36
+    Stream: column 2 section ROW_INDEX start: 264035 length 39
+    Stream: column 3 section ROW_INDEX start: 264074 length 35
+    Stream: column 1 section DATA start: 264109 length 4007
+    Stream: column 2 section DATA start: 268116 length 8007
+    Stream: column 3 section DATA start: 276123 length 768
+    Stream: column 3 section LENGTH start: 276891 length 25
+    Stream: column 3 section DICTIONARY_DATA start: 276916 length 133
     Encoding column 0: DIRECT
     Encoding column 1: DIRECT_V2
     Encoding column 2: DIRECT_V2
-    Encoding column 3: DICTIONARY_V2
\ No newline at end of file
+    Encoding column 3: DICTIONARY_V2

Modified: hive/branches/vectorization/ql/src/test/results/clientnegative/lateral_view_join.q.out
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/results/clientnegative/lateral_view_join.q.out?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/results/clientnegative/lateral_view_join.q.out (original)
+++ hive/branches/vectorization/ql/src/test/results/clientnegative/lateral_view_join.q.out Thu Sep 12 01:21:10 2013
@@ -1,2 +1,2 @@
-FAILED: ParseException line 1:62 missing AS at 'myTable' near '<EOF>'
+FAILED: ParseException line 1:62 missing EOF at 'myTable' near 'AS'