Posted to commits@pig.apache.org by da...@apache.org on 2014/09/11 01:14:28 UTC

svn commit: r1624168 - in /pig/trunk: CHANGES.txt build.xml ivy.xml src/org/apache/pig/builtin/OrcStorage.java test/e2e/pig/tests/orc.conf test/org/apache/pig/builtin/TestOrcStoragePushdown.java

Author: daijy
Date: Wed Sep 10 23:14:27 2014
New Revision: 1624168

URL: http://svn.apache.org/r1624168
Log:
PIG-4102: Adding e2e tests and several improvements for Orc predicate pushdown

Modified:
    pig/trunk/CHANGES.txt
    pig/trunk/build.xml
    pig/trunk/ivy.xml
    pig/trunk/src/org/apache/pig/builtin/OrcStorage.java
    pig/trunk/test/e2e/pig/tests/orc.conf
    pig/trunk/test/org/apache/pig/builtin/TestOrcStoragePushdown.java

Modified: pig/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/pig/trunk/CHANGES.txt?rev=1624168&r1=1624167&r2=1624168&view=diff
==============================================================================
--- pig/trunk/CHANGES.txt (original)
+++ pig/trunk/CHANGES.txt Wed Sep 10 23:14:27 2014
@@ -70,6 +70,8 @@ OPTIMIZATIONS
  
 BUG FIXES
 
+PIG-4102: Adding e2e tests and several improvements for Orc predicate pushdown (daijy)
+
 PIG-4156: [PATCH] fix NPE when running scripts stored on hdfs:// (acoliver via daijy)
 
 PIG-4159: TestGroupConstParallelTez and TestJobSubmissionTez should be excluded in Hadoop 20 unit tests (cheolsoo)

Modified: pig/trunk/build.xml
URL: http://svn.apache.org/viewvc/pig/trunk/build.xml?rev=1624168&r1=1624167&r2=1624168&view=diff
==============================================================================
--- pig/trunk/build.xml (original)
+++ pig/trunk/build.xml Wed Sep 10 23:14:27 2014
@@ -731,6 +731,7 @@
             <fileset dir="${ivy.lib.dir}" includes="zookeeper-*.jar"/>
             <fileset dir="${ivy.lib.dir}" includes="accumulo-*.jar" excludes="accumulo-minicluster*.jar"/>
             <fileset dir="${ivy.lib.dir}" includes="json-simple-*.jar"/>
+            <fileset dir="${ivy.lib.dir}" includes="kryo-*.jar"/>
         </copy>
     </target>
 

Modified: pig/trunk/ivy.xml
URL: http://svn.apache.org/viewvc/pig/trunk/ivy.xml?rev=1624168&r1=1624167&r2=1624168&view=diff
==============================================================================
--- pig/trunk/ivy.xml (original)
+++ pig/trunk/ivy.xml Wed Sep 10 23:14:27 2014
@@ -421,7 +421,7 @@
     <dependency org="org.iq80.snappy" name="snappy" rev="${snappy.version}"
       conf="test->master" />
     <dependency org="com.esotericsoftware.kryo" name="kryo" rev="${kryo.version}"
-      conf="test->master" />
+      conf="compile->master" />
 
     <dependency org="org.vafer" name="jdeb" rev="${jdeb.version}"
       conf="compile->master">
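
The ivy.xml change above and the build.xml hunk that copies kryo-*.jar next to the other bundled jars go together: Kryo moves from a test-only dependency (test->master) to a compile-scope one (compile->master). A likely reason, stated here as an assumption rather than something the commit spells out, is that the Hive ORC code path OrcStorage drives now needs Kryo on the normal runtime classpath (Hive serializes the pushed-down SearchArgument with Kryo), not only under the unit tests. A minimal, generic Kryo round trip, not taken from Pig or Hive, illustrating the kind of runtime use a compile-scope dependency implies:

    // Illustrative only: a plain Kryo 2.x round trip (the Kryo major version of this era,
    // which does not require class registration). Predicate is a made-up stand-in for
    // whatever object ends up serialized into the job configuration.
    import com.esotericsoftware.kryo.Kryo;
    import com.esotericsoftware.kryo.io.Input;
    import com.esotericsoftware.kryo.io.Output;
    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;

    public class KryoRoundTrip {
        public static class Predicate {        // stand-in for a pushed-down predicate
            String column = "age";
            int literal = 30;
        }

        public static void main(String[] args) {
            Kryo kryo = new Kryo();

            // Serialize on the client side; this is the step that breaks with
            // NoClassDefFoundError if kryo-*.jar is only on the test classpath.
            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            Output out = new Output(bos);
            kryo.writeObject(out, new Predicate());
            out.close();

            // Deserialize, as a task would when reading the configuration back.
            Input in = new Input(new ByteArrayInputStream(bos.toByteArray()));
            Predicate p = kryo.readObject(in, Predicate.class);
            in.close();
            System.out.println(p.column + " >= " + p.literal);
        }
    }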

Modified: pig/trunk/src/org/apache/pig/builtin/OrcStorage.java
URL: http://svn.apache.org/viewvc/pig/trunk/src/org/apache/pig/builtin/OrcStorage.java?rev=1624168&r1=1624167&r2=1624168&view=diff
==============================================================================
--- pig/trunk/src/org/apache/pig/builtin/OrcStorage.java (original)
+++ pig/trunk/src/org/apache/pig/builtin/OrcStorage.java Wed Sep 10 23:14:27 2014
@@ -20,11 +20,11 @@ package org.apache.pig.builtin;
 import java.io.IOException;
 import java.math.BigDecimal;
 import java.math.BigInteger;
+import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Properties;
-import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
@@ -37,7 +37,6 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.io.orc.CompressionKind;
 import org.apache.hadoop.hive.ql.io.orc.OrcFile;
@@ -51,7 +50,6 @@ import org.apache.hadoop.hive.ql.io.sarg
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgument.Builder;
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgumentFactory;
 import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -498,8 +496,6 @@ public class OrcStorage extends LoadFunc
         for (ResourceFieldSchema field : schema.getFields()) {
             switch(field.getType()) {
             case DataType.BOOLEAN:
-                // TODO: ORC does not seem to support it
-                break;
             case DataType.INTEGER:
             case DataType.LONG:
             case DataType.FLOAT:
@@ -671,14 +667,12 @@ public class OrcStorage extends LoadFunc
     }
 
     private Object getSearchArgObjValue(Object value) {
-           // TODO Test BigInteger, BigInteger and DateTime
         if (value instanceof BigInteger) {
-            return HiveDecimal.create(((BigInteger)value));
+            return new BigDecimal((BigInteger)value);
         } else if (value instanceof BigDecimal) {
-            return HiveDecimal.create(((BigDecimal)value), false);
+            return value;
         } else if (value instanceof DateTime) {
-            //TODO is this right based on what DateTimeWritable.dateToDays() does? What about pig.datetime.default.tz?
-            return new DateWritable((int)(((DateTime)value).getMillis() / TimeUnit.DAYS.toMillis(1)));
+            return new Timestamp(((DateTime)value).getMillis());
         } else {
             return value;
         }
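
The OrcStorage.java hunks above drop the Hive-specific wrapper types when building pushdown literals: a Pig BigInteger is now handed to the search argument as a plain java.math.BigDecimal, a BigDecimal passes through unchanged, and a Joda DateTime becomes a java.sql.Timestamp instead of a day-count DateWritable. The earlier hunk also removes the break under DataType.BOOLEAN, so boolean fields fall through to the supported cases and become eligible for pushdown. A self-contained sketch of the resulting conversion, mirroring the diff; the SearchArgument call at the end is left as a comment because the exact Builder method names are an assumption about the Hive 0.13-era API that OrcStorage imports:

    import java.math.BigDecimal;
    import java.math.BigInteger;
    import java.sql.Timestamp;
    import org.joda.time.DateTime;

    public class SargLiterals {
        // Same conversion as the patched getSearchArgObjValue above.
        static Object getSearchArgObjValue(Object value) {
            if (value instanceof BigInteger) {
                // ORC predicate evaluation understands BigDecimal, not BigInteger.
                return new BigDecimal((BigInteger) value);
            } else if (value instanceof BigDecimal) {
                return value;                                 // usable as-is
            } else if (value instanceof DateTime) {
                // Pig datetimes map to ORC timestamps, so compare as java.sql.Timestamp.
                return new Timestamp(((DateTime) value).getMillis());
            } else {
                return value;                                 // int, long, float, double, String, ...
            }
        }

        public static void main(String[] args) {
            Object literal = getSearchArgObjValue(new DateTime(2010, 1, 3, 0, 0));
            System.out.println(literal);                      // e.g. 2010-01-03 00:00:00.0
            // Illustrative use (assumed Builder API):
            // SearchArgument sarg = SearchArgumentFactory.newBuilder()
            //         .startNot().lessThan("f10", literal).end().build();
        }
    }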

Modified: pig/trunk/test/e2e/pig/tests/orc.conf
URL: http://svn.apache.org/viewvc/pig/trunk/test/e2e/pig/tests/orc.conf?rev=1624168&r1=1624167&r2=1624168&view=diff
==============================================================================
--- pig/trunk/test/e2e/pig/tests/orc.conf (original)
+++ pig/trunk/test/e2e/pig/tests/orc.conf Wed Sep 10 23:14:27 2014
@@ -20,10 +20,11 @@ register :HIVELIBDIR:/hive-exec-:HIVEVER
 register :HIVELIBDIR:/hive-shims-common-:HIVEVERSION:.jar;
 register :HIVELIBDIR:/hive-shims-common-secure-:HIVEVERSION:.jar;
 register :HIVELIBDIR:/hive-shims-:HIVESHIMSVERSION:-:HIVEVERSION:.jar;
+register :HIVELIBDIR:/kryo*.jar;
 a = load ':INPATH:/singlefile/studenttab10k' as (name:chararray, age:int, gpa:float);
 store a into ':OUTPATH:.intermediate' using OrcStorage();
+exec
 b = load ':OUTPATH:.intermediate' using OrcStorage();
-describe b;
 c = filter b by age < 30;
 store c into ':OUTPATH:';\,
                         'verify_pig_script' => q\a = load ':INPATH:/singlefile/studenttab10k' as (name:chararray, age:int, gpa:float);
@@ -41,8 +42,10 @@ register :HIVELIBDIR:/hive-exec-:HIVEVER
 register :HIVELIBDIR:/hive-shims-common-:HIVEVERSION:.jar;
 register :HIVELIBDIR:/hive-shims-common-secure-:HIVEVERSION:.jar;
 register :HIVELIBDIR:/hive-shims-:HIVESHIMSVERSION:-:HIVEVERSION:.jar;
+register :HIVELIBDIR:/kryo*.jar;
 a = load ':INPATH:/singlefile/studentcomplextab10k' as (nameagegpamap:map[], nameagegpatuple:tuple(tname:chararray, tage:int, tgpa:float), nameagegpabag:bag{t:tuple(bname:chararray, bage:int, bgpa:float)});
 store a into ':OUTPATH:.intermediate' using OrcStorage();
+exec
 b = load ':OUTPATH:.intermediate' using OrcStorage();
 store b into ':OUTPATH:';\,
                         'verify_pig_script' => q\a = load ':INPATH:/singlefile/studentcomplextab10k' as (nameagegpamap:map[], nameagegpatuple:tuple(tname:chararray, tage:int, tgpa:float), nameagegpabag:bag{t:tuple(bname:chararray, bage:int, bgpa:float)});
@@ -59,14 +62,15 @@ register :HIVELIBDIR:/hive-exec-:HIVEVER
 register :HIVELIBDIR:/hive-shims-common-:HIVEVERSION:.jar;
 register :HIVELIBDIR:/hive-shims-common-secure-:HIVEVERSION:.jar;
 register :HIVELIBDIR:/hive-shims-:HIVESHIMSVERSION:-:HIVEVERSION:.jar;
+register :HIVELIBDIR:/kryo*.jar;
 a = load ':INPATH:/singlefile/studentnulltab10k' as (name:chararray, age:int, gpa:float);
 store a into ':OUTPATH:.simple.intermediate' using OrcStorage();
+exec
 b = load ':INPATH:/singlefile/studentcomplextab10k' as (nameagegpamap:map[], nameagegpatuple:tuple(tname:chararray, tage:int, tgpa:float), nameagegpabag:bag{t:tuple(bname:chararray, bage:int, bgpa:float)}, nameagegpamap_name:chararray, nameagegpamap_age:int, nameagegpamap_gpa:float);
 store b into ':OUTPATH:.complex.intermediate' using OrcStorage();
+exec
 c = load ':OUTPATH:.simple.intermediate' using OrcStorage();
-describe c;
 d = load ':OUTPATH:.complex.intermediate' using OrcStorage();
-describe d;
 e = foreach c generate name, age, gpa;
 f = foreach d generate nameagegpamap#'name' as name, nameagegpamap#'age' as age, nameagegpamap#'gpa' as gpa, nameagegpatuple.tage as tage, FLATTEN(nameagegpabag) as (bname, bage, bgpa);
 g = join e by name, f by name;
@@ -95,15 +99,141 @@ register :HIVELIBDIR:/hive-exec-:HIVEVER
 register :HIVELIBDIR:/hive-shims-common-:HIVEVERSION:.jar;
 register :HIVELIBDIR:/hive-shims-common-secure-:HIVEVERSION:.jar;
 register :HIVELIBDIR:/hive-shims-:HIVESHIMSVERSION:-:HIVEVERSION:.jar;
+register :HIVELIBDIR:/kryo*.jar;
 a = load ':INPATH:/singlefile/studenttab10k' as (name:chararray, age:int, gpa:float);
 store a into ':OUTPATH:.orc_params.intermediate' using OrcStorage('-c ZLIB -s 67108864 -r 100000 -b 1048576 -p true -v 0.12');
+exec
 b = load ':OUTPATH:.orc_params.intermediate' using OrcStorage();
 store b into ':OUTPATH:';\,
                         'verify_pig_script' => q\a = load ':INPATH:/singlefile/studenttab10k' as (name:chararray, age:int, gpa:float);
 store a into ':OUTPATH:';\,
                         }
                         ]
-                }
+                },
+                {
+                'name' => 'Orc_Pushdown',
+                'tests' => [
+# Test 1: Load (primitive) from PigStorage and store into OrcStorage
+# Also tests multiple load stores in same script
+                        {
+                        'num' => 1,
+                        'notmq' => 1,
+                        'pig' => q\
+register :HIVELIBDIR:/hive-common-:HIVEVERSION:.jar;
+register :HIVELIBDIR:/hive-serde-:HIVEVERSION:.jar;
+register :HIVELIBDIR:/hive-exec-:HIVEVERSION:-core.jar;
+register :HIVELIBDIR:/hive-shims-common-:HIVEVERSION:.jar;
+register :HIVELIBDIR:/hive-shims-common-secure-:HIVEVERSION:.jar;
+register :HIVELIBDIR:/hive-shims-:HIVESHIMSVERSION:-:HIVEVERSION:.jar;
+register :HIVELIBDIR:/kryo*.jar;
+a = load ':INPATH:/singlefile/studenttab10k' as (name:chararray, age:int, gpa:float);
+b = order a by name parallel 4;
+store b into ':OUTPATH:.intermediate' using OrcStorage();
+exec
+b = load ':OUTPATH:.intermediate' using OrcStorage();
+c = filter b by name < 'david falkner';
+store c into ':OUTPATH:';\,
+                        'verify_pig_script' => q\a = load ':INPATH:/singlefile/studenttab10k' as (name:chararray, age:int, gpa:float);
+b = filter a by name < 'david falkner';
+store b into ':OUTPATH:';\,
+                        },
+                        {
+                        'num' => 2,
+                        'notmq' => 1,
+                        'pig' => q\
+register :HIVELIBDIR:/hive-common-:HIVEVERSION:.jar;
+register :HIVELIBDIR:/hive-serde-:HIVEVERSION:.jar;
+register :HIVELIBDIR:/hive-exec-:HIVEVERSION:-core.jar;
+register :HIVELIBDIR:/hive-shims-common-:HIVEVERSION:.jar;
+register :HIVELIBDIR:/hive-shims-common-secure-:HIVEVERSION:.jar;
+register :HIVELIBDIR:/hive-shims-:HIVESHIMSVERSION:-:HIVEVERSION:.jar;
+register :HIVELIBDIR:/kryo*.jar;
+a = load ':INPATH:/singlefile/studenttab20m' as (name:chararray, age:int, gpa:float);
+b = order a by age desc parallel 4;
+store b into ':OUTPATH:.intermediate' using OrcStorage('-s 10000000');
+exec
+b = load ':OUTPATH:.intermediate' using OrcStorage();
+c = filter b by age <= 22;
+store c into ':OUTPATH:';\,
+                        'verify_pig_script' => q\a = load ':INPATH:/singlefile/studenttab20m' as (name:chararray, age:int, gpa:float);
+b = filter a by age <= 22;
+store b into ':OUTPATH:';\,
+                        },
+                        {
+                        'num' => 3,
+                        'notmq' => 1,
+                        'pig' => q\
+register :HIVELIBDIR:/hive-common-:HIVEVERSION:.jar;
+register :HIVELIBDIR:/hive-serde-:HIVEVERSION:.jar;
+register :HIVELIBDIR:/hive-exec-:HIVEVERSION:-core.jar;
+register :HIVELIBDIR:/hive-shims-common-:HIVEVERSION:.jar;
+register :HIVELIBDIR:/hive-shims-common-secure-:HIVEVERSION:.jar;
+register :HIVELIBDIR:/hive-shims-:HIVESHIMSVERSION:-:HIVEVERSION:.jar;
+register :HIVELIBDIR:/kryo*.jar;
+a = load ':INPATH:/singlefile/studenttab10k' as (name:chararray, age:int, gpa:float);
+b = order a by gpa parallel 4;
+store b into ':OUTPATH:.intermediate' using OrcStorage();
+exec
+b = load ':OUTPATH:.intermediate' using OrcStorage();
+c = filter b by gpa >= 3.2 and gpa < 3.5 and age > 30 + 2;
+store c into ':OUTPATH:';\,
+                        'verify_pig_script' => q\a = load ':INPATH:/singlefile/studenttab10k' as (name:chararray, age:int, gpa:float);
+b = filter a by gpa >= 3.2 and gpa < 3.5 and age > 30 + 2;
+store b into ':OUTPATH:';\,
+                        },
+                        {
+                        'num' => 4,
+                        'notmq' => 1,
+                        'pig' => q\
+register :HIVELIBDIR:/hive-common-:HIVEVERSION:.jar;
+register :HIVELIBDIR:/hive-serde-:HIVEVERSION:.jar;
+register :HIVELIBDIR:/hive-exec-:HIVEVERSION:-core.jar;
+register :HIVELIBDIR:/hive-shims-common-:HIVEVERSION:.jar;
+register :HIVELIBDIR:/hive-shims-common-secure-:HIVEVERSION:.jar;
+register :HIVELIBDIR:/hive-shims-:HIVESHIMSVERSION:-:HIVEVERSION:.jar;
+register :HIVELIBDIR:/kryo*.jar;
+a = load ':INPATH:/singlefile/studenttab10k' as (name:chararray, age:int, gpa:bigdecimal);
+b = order a by gpa parallel 4;
+store b into ':OUTPATH:.intermediate' using OrcStorage();
+exec
+b = load ':OUTPATH:.intermediate' using OrcStorage();
+c = filter b by gpa >= 3.5;
+store c into ':OUTPATH:';\,
+                        'verify_pig_script' => q\a = load ':INPATH:/singlefile/studenttab10k' as (name:chararray, age:int, gpa:bigdecimal);
+b = filter a by gpa >= 3.5;
+store b into ':OUTPATH:';\,
+                        'floatpostprocess' => 1,
+                        'delimiter' => '	',
+                        },
+                        {
+                        'num' => 5,
+                        'notmq' => 1,
+                        'pig' => q\
+register :HIVELIBDIR:/hive-common-:HIVEVERSION:.jar;
+register :HIVELIBDIR:/hive-serde-:HIVEVERSION:.jar;
+register :HIVELIBDIR:/hive-exec-:HIVEVERSION:-core.jar;
+register :HIVELIBDIR:/hive-shims-common-:HIVEVERSION:.jar;
+register :HIVELIBDIR:/hive-shims-common-secure-:HIVEVERSION:.jar;
+register :HIVELIBDIR:/hive-shims-:HIVESHIMSVERSION:-:HIVEVERSION:.jar;
+register :HIVELIBDIR:/kryo*.jar;
+a = load ':INPATH:/singlefile/studenttab10k' as (name:chararray, age:int, gpa:double);
+b = foreach a generate name, age, gpa, (age>35 ? ToDate('20100101', 'yyyyMMdd', 'UTC') : ToDate('20100105', 'yyyyMMdd', 'UTC')) as d;
+c = order b by d parallel 4;
+store c into ':OUTPATH:.intermediate' using OrcStorage();
+exec
+b = load ':OUTPATH:.intermediate' using OrcStorage();
+c = filter b by d >= ToDate('20100103', 'yyyyMMdd', 'UTC');
+d = foreach c generate name, age, gpa;
+store d into ':OUTPATH:';\,
+                        'verify_pig_script' => q\a = load ':INPATH:/singlefile/studenttab10k' as (name:chararray, age:int, gpa:bigdecimal);
+b = filter a by age<=35;
+store b into ':OUTPATH:';\,
+                        'floatpostprocess' => 1,
+                        'delimiter' => '	',
+                        },
+                        ]
+                },
+
         ]
 };
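
Three changes repeat across the existing e2e scripts above: each script now registers :HIVELIBDIR:/kryo*.jar alongside the Hive jars (matching the ivy and build.xml changes), the describe statements are gone, and an exec is inserted between the store into ':OUTPATH:.intermediate' and the load that reads it back. exec forces Pig to run the statements queued so far as one batch, so the intermediate ORC files exist before the rest of the script touches them, which matters because OrcStorage derives its load schema from the ORC files themselves. The new 'Orc_Pushdown' group then adds five tests that sort the input, store it with OrcStorage, and filter on chararray, int, float/double, bigdecimal and datetime columns so the pushed-down predicate can skip row groups. A small PigServer sketch of the same store / exec / load pattern; the paths and the local exec type are placeholders, not taken from these tests:

    import org.apache.pig.ExecType;
    import org.apache.pig.PigServer;

    public class StoreThenLoadOrc {
        public static void main(String[] args) throws Exception {
            PigServer pig = new PigServer(ExecType.LOCAL);

            // First batch: write the intermediate ORC data.
            pig.setBatchOn();
            pig.registerQuery("a = load 'studenttab10k' as (name:chararray, age:int, gpa:float);");
            pig.registerQuery("store a into 'out.intermediate' using OrcStorage();");
            pig.executeBatch();   // plays the role of `exec` in the .conf scripts

            // Second batch: read it back and filter, letting ORC prune row groups.
            pig.setBatchOn();
            pig.registerQuery("b = load 'out.intermediate' using OrcStorage();");
            pig.registerQuery("c = filter b by age < 30;");
            pig.registerQuery("store c into 'out';");
            pig.executeBatch();
        }
    }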
 

Modified: pig/trunk/test/org/apache/pig/builtin/TestOrcStoragePushdown.java
URL: http://svn.apache.org/viewvc/pig/trunk/test/org/apache/pig/builtin/TestOrcStoragePushdown.java?rev=1624168&r1=1624167&r2=1624168&view=diff
==============================================================================
--- pig/trunk/test/org/apache/pig/builtin/TestOrcStoragePushdown.java (original)
+++ pig/trunk/test/org/apache/pig/builtin/TestOrcStoragePushdown.java Wed Sep 10 23:14:27 2014
@@ -24,12 +24,14 @@ import java.io.BufferedWriter;
 import java.io.File;
 import java.io.FileWriter;
 import java.io.IOException;
+import java.math.BigDecimal;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashSet;
 import java.util.List;
 
 import org.apache.commons.lang.RandomStringUtils;
+import org.apache.commons.lang.math.RandomUtils;
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
 import org.apache.log4j.FileAppender;
 import org.apache.log4j.Level;
@@ -51,6 +53,8 @@ import org.apache.pig.newplan.logical.ru
 import org.apache.pig.test.MiniGenericCluster;
 import org.apache.pig.test.Util;
 import org.apache.pig.tools.pigstats.JobStats;
+import org.joda.time.DateTime;
+import org.joda.time.DateTimeZone;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Assert;
@@ -126,25 +130,41 @@ public class TestOrcStoragePushdown {
         double[] dVal = new double[] {1000.11, 2000.22, 3000.33};
         StringBuilder sb = new StringBuilder();
         for (int i=1; i <= 10000; i++) {
-            sb.append((i > 900 && i < 1100) ? true : false).append("\t"); //boolean
+            sb.append((i > 6500 && i <= 9000) ? true : false).append("\t"); //boolean
             sb.append((i > 1000 && i < 3000) ? 1 : 5).append("\t"); //byte
             sb.append((i > 2500 && i <= 4500) ? 100 : 200).append("\t"); //short
             sb.append(i).append("\t"); //int
             sb.append(lVal[i%3]).append("\t"); //long
             sb.append(fVal[i%4]).append("\t"); //float
             sb.append((i > 2500 && i < 3500) ? dVal[i%3] : dVal[i%1]).append("\t"); //double
-            sb.append((i%2 == 1 ? "" : RandomStringUtils.random(100))).append("\t"); //bytearray
-            sb.append((i%2 == 0 ? "" : RandomStringUtils.random(100))).append("\n"); //string
-            //sb.append("").append("\t"); //datetime
-            //sb.append("").append("\n"); //bigdecimal
+            sb.append((i%2 == 1 ? "" : RandomStringUtils.random(100).replaceAll("\t", " ")
+                    .replaceAll("\n", " ").replaceAll("\r", " "))).append("\t"); //bytearray
+            sb.append((i%2 == 0 ? "" : RandomStringUtils.random(100).replaceAll("\t", " ")
+                    .replaceAll("\n", " ").replaceAll("\r", " "))).append("\t"); //string
+            int year;
+            if (i > 5000 && i <= 8000) { //datetime
+                year = RandomUtils.nextInt(4)+2010;
+            } else {
+                year = RandomUtils.nextInt(10)+2000;
+            }
+            sb.append(new DateTime(year, RandomUtils.nextInt(12)+1,
+                    RandomUtils.nextInt(28)+1, RandomUtils.nextInt(24), RandomUtils.nextInt(60),
+                    DateTimeZone.UTC).toString()).append("\t"); // datetime
+            String bigString;
+            if (i>7500) {
+                bigString = RandomStringUtils.randomNumeric(9) + "." + RandomStringUtils.randomNumeric(5);
+            } else {
+                bigString = "1" + RandomStringUtils.randomNumeric(9) + "." + RandomStringUtils.randomNumeric(5);
+            }
+            sb.append(new BigDecimal(bigString)).append("\n"); //bigdecimal
             bw.write(sb.toString());
             sb.setLength(0);
         }
         bw.close();
 
         // Store only 1000 rows in each row block (MIN_ROW_INDEX_STRIDE is 1000. So can't use less than that)
-        pigServer.registerQuery("A = load '" + inputTxtFile + "' as (f1:boolean, f2:int, f3:int, f4:int, f5:long, f6:float, f7:double, f8:bytearray, f9:chararray);");//, f10:datetime, f11:bigdecimal);");
-        pigServer.registerQuery("store A into '" + INPUT +"' using OrcStorage('-r 1000');");
+        pigServer.registerQuery("A = load '" + inputTxtFile + "' as (f1:boolean, f2:int, f3:int, f4:int, f5:long, f6:float, f7:double, f8:bytearray, f9:chararray, f10:datetime, f11:bigdecimal);");
+        pigServer.registerQuery("store A into '" + INPUT +"' using OrcStorage('-r 1000 -s 100000');");
         Util.copyFromLocalToCluster(cluster, INPUT, INPUT);
     }
 
@@ -290,35 +310,34 @@ public class TestOrcStoragePushdown {
                 "expr = leaf-0", sarg.toString());
     }
 
-    //@Test
+    @Test
     public void testPredicatePushdownBoolean() throws Exception {
-        testPredicatePushdownLocal("f1 == true", 10);
+        testPredicatePushdown("f1 == true", 2500, 1200000);
     }
 
     @Test
     public void testPredicatePushdownByteShort() throws Exception {
-        //TODO: BytesWithoutPushdown was 2373190 and bytesWithPushdown was 1929669
-        // Expected to see more difference only when 3 out of 10 blocks are read. Other tests too.
-        // Investigate why.
-        testPredicatePushdown("f2 != 5 or f3 == 100", 3500, 400000);
+        testPredicatePushdown("f2 != 5 or f3 == 100", 3500, 1200000);
     }
 
     @Test
     public void testPredicatePushdownIntLongString() throws Exception {
-        testPredicatePushdown("f4 >= 980 and f4 < 1010 and (f5 == 100 or f9 is not null)", 20, 800000);
+        testPredicatePushdown("f4 >= 980 and f4 < 1010 and (f5 == 100 or f9 is not null)", 20, 1200000);
     }
 
     @Test
     public void testPredicatePushdownFloatDouble() throws Exception {
-        testPredicatePushdown("f6 == 100.0 and f7 > 2000.00000001", 167, 800000);
+        testPredicatePushdown("f6 == 100.0 and f7 > 2000.00000001", 167, 1600000);
     }
 
-    //@Test
+    @Test
     public void testPredicatePushdownBigDecimal() throws Exception {
+        testPredicatePushdown("f11 < (bigdecimal)'1000000000';", 2500, 1600000);
     }
 
-    //@Test
+    @Test
     public void testPredicatePushdownTimestamp() throws Exception {
+        testPredicatePushdown("f10 >= ToDate('20100101', 'yyyyMMdd', 'UTC')", 3000, 400000);
     }
 
     private Expression getExpressionForTest(String query, List<String> predicateCols) throws Exception {
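
The test changes above extend the generated input with a datetime column (f10) and a bigdecimal column (f11), scrub tabs, newlines and carriage returns out of the random bytearray/chararray values so the tab-delimited file stays parseable, and enable the previously commented-out boolean, bigdecimal and timestamp pushdown tests with fresh row counts and byte thresholds. A hypothetical sketch of the assertion style those thresholds suggest; the helper below is invented for illustration (the real helpers are private to TestOrcStoragePushdown and not part of this diff), and reading the third argument of testPredicatePushdown as a minimum saving in bytes read is itself an assumption:

    public class PushdownSavingCheck {
        // Hypothetical: require that running the filter with the pushed-down predicate
        // reads at least `expectedMinSaving` fewer bytes than running it without.
        static void assertPushdownSavesBytes(long bytesWithoutPushdown,
                                             long bytesWithPushdown,
                                             long expectedMinSaving) {
            long saving = bytesWithoutPushdown - bytesWithPushdown;
            if (saving < expectedMinSaving) {
                throw new AssertionError("pushdown saved only " + saving
                        + " bytes, expected at least " + expectedMinSaving);
            }
        }

        public static void main(String[] args) {
            // Made-up counter values, in the spirit of testPredicatePushdownByteShort.
            assertPushdownSavesBytes(2400000L, 1000000L, 1200000L);
        }
    }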