Posted to dev@drill.apache.org by GitBox <gi...@apache.org> on 2019/10/29 12:30:10 UTC

[GitHub] [drill] arina-ielchiieva commented on a change in pull request #1883: DRILL-7418: MetadataDirectGroupScan improvements

URL: https://github.com/apache/drill/pull/1883#discussion_r340045231
 
 

 ##########
 File path: exec/java-exec/src/test/java/org/apache/drill/exec/planner/logical/TestConvertCountToDirectScan.java
 ##########
 @@ -17,364 +17,426 @@
  */
 package org.apache.drill.exec.planner.logical;
 
-import org.apache.drill.PlanTestBase;
 import org.apache.drill.categories.PlannerTest;
+import org.apache.drill.categories.UnlikelyTest;
 import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.test.ClusterFixture;
+import org.apache.drill.test.ClusterFixtureBuilder;
+import org.apache.drill.test.ClusterTest;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
 import java.nio.file.Paths;
 
-@Category(PlannerTest.class)
-public class TestConvertCountToDirectScan extends PlanTestBase {
+import static org.junit.Assert.assertEquals;
+
+@Category({PlannerTest.class, UnlikelyTest.class})
+public class TestConvertCountToDirectScan extends ClusterTest {
 
   @BeforeClass
-  public static void setupTestFiles() {
+  public static void setup() throws Exception {
+    ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher);
     dirTestWatcher.copyResourceToRoot(Paths.get("directcount.parquet"));
+    startCluster(builder);
   }
 
   @Test
-  public void ensureCaseDoesNotConvertToDirectScan() throws Exception {
-    testPlanMatchingPatterns(
-        "select count(case when n_name = 'ALGERIA' and n_regionkey = 2 then n_nationkey else null end) as cnt\n" +
-            "from dfs.`directcount.parquet`", new String[]{"CASE"});
+  public void testCaseDoesNotConvertToDirectScan() throws Exception {
+    queryBuilder()
+      .sql("select " +
+      "count(case when n_name = 'ALGERIA' and n_regionkey = 2 then n_nationkey else null end) as cnt " +
+      "from dfs.`directcount.parquet`")
+      .planMatcher()
+      .include("CASE")
+      .match();
   }
 
   @Test
-  public void ensureConvertSimpleCountToDirectScan() throws Exception {
+  public void testConvertSimpleCountToDirectScan() throws Exception {
     String sql = "select count(*) as cnt from cp.`tpch/nation.parquet`";
-    testPlanMatchingPatterns(sql, new String[]{"DynamicPojoRecordReader"});
+
+    queryBuilder()
+      .sql(sql)
+      .planMatcher()
+      .include("DynamicPojoRecordReader")
+      .match();
 
     testBuilder()
-        .sqlQuery(sql)
-        .unOrdered()
-        .baselineColumns("cnt")
-        .baselineValues(25L)
-        .go();
+      .sqlQuery(sql)
+      .unOrdered()
+      .baselineColumns("cnt")
+      .baselineValues(25L)
+      .go();
   }
 
   @Test
-  public void ensureConvertSimpleCountConstToDirectScan() throws Exception {
+  public void testConvertSimpleCountConstToDirectScan() throws Exception {
     String sql = "select count(100) as cnt from cp.`tpch/nation.parquet`";
-    testPlanMatchingPatterns(sql, new String[]{"DynamicPojoRecordReader"});
+
+    queryBuilder()
+      .sql(sql)
+      .planMatcher()
+      .include("DynamicPojoRecordReader")
+      .match();
 
     testBuilder()
-        .sqlQuery(sql)
-        .unOrdered()
-        .baselineColumns("cnt")
-        .baselineValues(25L)
-        .go();
+      .sqlQuery(sql)
+      .unOrdered()
+      .baselineColumns("cnt")
+      .baselineValues(25L)
+      .go();
   }
 
   @Test
-  public void ensureConvertSimpleCountConstExprToDirectScan() throws Exception {
+  public void testConvertSimpleCountConstExprToDirectScan() throws Exception {
     String sql = "select count(1 + 2) as cnt from cp.`tpch/nation.parquet`";
-    testPlanMatchingPatterns(sql, new String[]{"DynamicPojoRecordReader"});
+
+    queryBuilder()
+      .sql(sql)
+      .planMatcher()
+      .include("DynamicPojoRecordReader")
+      .match();
 
     testBuilder()
-        .sqlQuery(sql)
-        .unOrdered()
-        .baselineColumns("cnt")
-        .baselineValues(25L)
-        .go();
+      .sqlQuery(sql)
+      .unOrdered()
+      .baselineColumns("cnt")
+      .baselineValues(25L)
+      .go();
   }
 
   @Test
-  public void ensureDoesNotConvertForDirectoryColumns() throws Exception {
+  public void testDoesNotConvertForDirectoryColumns() throws Exception {
     String sql = "select count(dir0) as cnt from cp.`tpch/nation.parquet`";
-    testPlanMatchingPatterns(sql, new String[]{"ParquetGroupScan"});
+
+    queryBuilder()
+      .sql(sql)
+      .planMatcher()
+      .include("ParquetGroupScan")
+      .match();
 
     testBuilder()
-        .sqlQuery(sql)
-        .unOrdered()
-        .baselineColumns("cnt")
-        .baselineValues(0L)
-        .go();
+      .sqlQuery(sql)
+      .unOrdered()
+      .baselineColumns("cnt")
+      .baselineValues(0L)
+      .go();
   }
 
   @Test
-  public void ensureConvertForImplicitColumns() throws Exception {
+  public void testConvertForImplicitColumns() throws Exception {
     String sql = "select count(fqn) as cnt from cp.`tpch/nation.parquet`";
-    testPlanMatchingPatterns(sql, new String[]{"DynamicPojoRecordReader"});
+
+    queryBuilder()
+      .sql(sql)
+      .planMatcher()
+      .include("DynamicPojoRecordReader")
+      .match();
 
     testBuilder()
-        .sqlQuery(sql)
-        .unOrdered()
-        .baselineColumns("cnt")
-        .baselineValues(25L)
-        .go();
+      .sqlQuery(sql)
+      .unOrdered()
+      .baselineColumns("cnt")
+      .baselineValues(25L)
+      .go();
   }
 
   @Test
   public void ensureConvertForSeveralColumns() throws Exception {
-    test("use dfs.tmp");
+    run("use dfs.tmp");
     String tableName = "parquet_table_counts";
 
     try {
       String newFqnColumnName = "new_fqn";
-      test("alter session set `%s` = '%s'", ExecConstants.IMPLICIT_FQN_COLUMN_LABEL, newFqnColumnName);
-      test("create table %s as select * from cp.`parquet/alltypes_optional.parquet`", tableName);
-      test("refresh table metadata %s", tableName);
+      client.alterSession(ExecConstants.IMPLICIT_FQN_COLUMN_LABEL, newFqnColumnName);
+      run("create table %s as select * from cp.`parquet/alltypes_optional.parquet`", tableName);
+      run("refresh table metadata %s", tableName);
 
       String sql = String.format("select\n" +
-          "count(%s) as implicit_count,\n" +
-          "count(*) as star_count,\n" +
-          "count(col_int) as int_column_count,\n" +
-          "count(col_vrchr) as vrchr_column_count\n" +
-          "from %s", newFqnColumnName, tableName);
-
-      testPlanMatchingPatterns(sql, new String[]{"DynamicPojoRecordReader"});
+        "count(%s) as implicit_count,\n" +
+        "count(*) as star_count,\n" +
+        "count(col_int) as int_column_count,\n" +
+        "count(col_vrchr) as vrchr_column_count\n" +
+        "from %s", newFqnColumnName, tableName);
+
+      queryBuilder()
+        .sql(sql)
+        .planMatcher()
+        .include("DynamicPojoRecordReader")
+        .match();
 
       testBuilder()
-          .sqlQuery(sql)
-          .unOrdered()
-          .baselineColumns("implicit_count", "star_count", "int_column_count", "vrchr_column_count")
-          .baselineValues(6L, 6L, 2L, 3L)
-          .go();
+        .sqlQuery(sql)
+        .unOrdered()
+        .baselineColumns("implicit_count", "star_count", "int_column_count", "vrchr_column_count")
+        .baselineValues(6L, 6L, 2L, 3L)
+        .go();
 
     } finally {
-      test("alter session reset `%s`", ExecConstants.IMPLICIT_FQN_COLUMN_LABEL);
-      test("drop table if exists %s", tableName);
+      client.resetSession(ExecConstants.IMPLICIT_FQN_COLUMN_LABEL);
+      run("drop table if exists %s", tableName);
     }
   }
 
   @Test
-  public void ensureCorrectCountWithMissingStatistics() throws Exception {
-    test("use dfs.tmp");
+  public void testCorrectCountWithMissingStatistics() throws Exception {
+    run("use dfs.tmp");
     String tableName = "wide_str_table";
     try {
       // table will contain two partitions: one - with null value, second - with non null value
-      test("create table %s partition by (col_str) as select * from cp.`parquet/wide_string.parquet`", tableName);
+      run("create table %s partition by (col_str) as select * from cp.`parquet/wide_string.parquet`", tableName);
 
-      String query = String.format("select count(col_str) as cnt_str, count(*) as cnt_total from %s", tableName);
+      String sql = String.format("select count(col_str) as cnt_str, count(*) as cnt_total from %s", tableName);
 
       // direct scan should not be applied since we don't have statistics
-      testPlanMatchingPatterns(query, null, new String[]{"DynamicPojoRecordReader"});
+      queryBuilder()
+        .sql(sql)
+        .planMatcher()
+        .exclude("DynamicPojoRecordReader")
+        .match();
 
       testBuilder()
-        .sqlQuery(query)
+        .sqlQuery(sql)
         .unOrdered()
         .baselineColumns("cnt_str", "cnt_total")
         .baselineValues(1L, 2L)
         .go();
     } finally {
-      test("drop table if exists %s", tableName);
+      run("drop table if exists %s", tableName);
     }
   }
 
   @Test
   public void testCountsWithMetadataCacheSummary() throws Exception {
-    test("use dfs.tmp");
+    run("use dfs.tmp");
+
     String tableName = "parquet_table_counts";
 
     try {
-      test(String.format("create table `%s/1` as select * from cp.`parquet/alltypes_optional.parquet`", tableName));
-      test(String.format("create table `%s/2` as select * from cp.`parquet/alltypes_optional.parquet`", tableName));
-      test(String.format("create table `%s/3` as select * from cp.`parquet/alltypes_optional.parquet`", tableName));
-      test(String.format("create table `%s/4` as select * from cp.`parquet/alltypes_optional.parquet`", tableName));
+      run("create table `%s/1` as select * from cp.`parquet/alltypes_optional.parquet`", tableName);
+      run("create table `%s/2` as select * from cp.`parquet/alltypes_optional.parquet`", tableName);
+      run("create table `%s/3` as select * from cp.`parquet/alltypes_optional.parquet`", tableName);
+      run("create table `%s/4` as select * from cp.`parquet/alltypes_optional.parquet`", tableName);
 
-      test("refresh table metadata %s", tableName);
+      run("refresh table metadata %s", tableName);
 
       String sql = String.format("select\n" +
-              "count(*) as star_count,\n" +
-              "count(col_int) as int_column_count,\n" +
-              "count(col_vrchr) as vrchr_column_count\n" +
-              "from %s", tableName);
-
-      int expectedNumFiles = 1;
-      String numFilesPattern = "numFiles = " + expectedNumFiles;
-      String usedMetaSummaryPattern = "usedMetadataSummaryFile = true";
-      String recordReaderPattern = "DynamicPojoRecordReader";
-
-      testPlanMatchingPatterns(sql, new String[]{numFilesPattern, usedMetaSummaryPattern, recordReaderPattern});
+        "count(*) as star_count,\n" +
+        "count(col_int) as int_column_count,\n" +
+        "count(col_vrchr) as vrchr_column_count\n" +
+        "from %s", tableName);
+
+      queryBuilder()
+        .sql(sql)
+        .planMatcher()
+        .include("numFiles = 1")
+        .include("usedMetadataSummaryFile = true")
+        .include("DynamicPojoRecordReader")
+        .match();
 
       testBuilder()
-          .sqlQuery(sql)
-          .unOrdered()
-          .baselineColumns("star_count", "int_column_count", "vrchr_column_count")
-          .baselineValues(24L, 8L, 12L)
-          .go();
+        .sqlQuery(sql)
+        .unOrdered()
+        .baselineColumns("star_count", "int_column_count", "vrchr_column_count")
+        .baselineValues(24L, 8L, 12L)
+        .go();
 
     } finally {
-      test("drop table if exists %s", tableName);
+      run("drop table if exists %s", tableName);
     }
   }
 
   @Test
   public void testCountsWithMetadataCacheSummaryAndDirPruning() throws Exception {
-    test("use dfs.tmp");
+    run("use dfs.tmp");
     String tableName = "parquet_table_counts";
 
     try {
-      test(String.format("create table `%s/1` as select * from cp.`parquet/alltypes_optional.parquet`", tableName));
-      test(String.format("create table `%s/2` as select * from cp.`parquet/alltypes_optional.parquet`", tableName));
-      test(String.format("create table `%s/3` as select * from cp.`parquet/alltypes_optional.parquet`", tableName));
-      test(String.format("create table `%s/4` as select * from cp.`parquet/alltypes_optional.parquet`", tableName));
+      run("create table `%s/1` as select * from cp.`parquet/alltypes_optional.parquet`", tableName);
+      run("create table `%s/2` as select * from cp.`parquet/alltypes_optional.parquet`", tableName);
+      run("create table `%s/3` as select * from cp.`parquet/alltypes_optional.parquet`", tableName);
+      run("create table `%s/4` as select * from cp.`parquet/alltypes_optional.parquet`", tableName);
 
-      test("refresh table metadata %s", tableName);
+      run("refresh table metadata %s", tableName);
 
       String sql = String.format("select\n" +
-              "count(*) as star_count,\n" +
-              "count(col_int) as int_column_count,\n" +
-              "count(col_vrchr) as vrchr_column_count\n" +
-              "from %s where dir0 = 1 ", tableName);
-
-      int expectedNumFiles = 1;
-      String numFilesPattern = "numFiles = " + expectedNumFiles;
-      String usedMetaSummaryPattern = "usedMetadataSummaryFile = true";
-      String recordReaderPattern = "DynamicPojoRecordReader";
-
-      testPlanMatchingPatterns(sql, new String[]{numFilesPattern, usedMetaSummaryPattern, recordReaderPattern});
+        "count(*) as star_count,\n" +
+        "count(col_int) as int_column_count,\n" +
+        "count(col_vrchr) as vrchr_column_count\n" +
+        "from %s where dir0 = 1 ", tableName);
+
+      queryBuilder()
+        .sql(sql)
+        .planMatcher()
+        .include("numFiles = 1")
+        .include("usedMetadataSummaryFile = true")
+        .include("DynamicPojoRecordReader")
+        .match();
 
       testBuilder()
-          .sqlQuery(sql)
-          .unOrdered()
-          .baselineColumns("star_count", "int_column_count", "vrchr_column_count")
-          .baselineValues(6L, 2L, 3L)
-          .go();
+        .sqlQuery(sql)
+        .unOrdered()
+        .baselineColumns("star_count", "int_column_count", "vrchr_column_count")
+        .baselineValues(6L, 2L, 3L)
+        .go();
 
     } finally {
-      test("drop table if exists %s", tableName);
+      run("drop table if exists %s", tableName);
     }
   }
 
   @Test
   public void testCountsWithWildCard() throws Exception {
-    test("use dfs.tmp");
+    run("use dfs.tmp");
     String tableName = "parquet_table_counts";
 
     try {
       for (int i = 0; i < 10; i++) {
-        test(String.format("create table `%s/12/%s` as select * from cp.`tpch/nation.parquet`", tableName, i));
+        run("create table `%s/12/%s` as select * from cp.`tpch/nation.parquet`", tableName, i);
       }
-      test(String.format("create table `%s/2` as select * from cp.`tpch/nation.parquet`", tableName));
-      test(String.format("create table `%s/2/11` as select * from cp.`tpch/nation.parquet`", tableName));
-      test(String.format("create table `%s/2/12` as select * from cp.`tpch/nation.parquet`", tableName));
+      run("create table `%s/2` as select * from cp.`tpch/nation.parquet`", tableName);
+      run("create table `%s/2/11` as select * from cp.`tpch/nation.parquet`", tableName);
+      run("create table `%s/2/12` as select * from cp.`tpch/nation.parquet`", tableName);
 
-      test("refresh table metadata %s", tableName);
+      run("refresh table metadata %s", tableName);
 
       String sql = String.format("select\n" +
-              "count(*) as star_count\n" +
-              "from `%s/1*`", tableName);
-
-      String usedMetaSummaryPattern = "usedMetadataSummaryFile = false";
-      String recordReaderPattern = "DynamicPojoRecordReader";
+        "count(*) as star_count\n" +
+        "from `%s/1*`", tableName);
 
-      testPlanMatchingPatterns(sql, new String[]{usedMetaSummaryPattern, recordReaderPattern});
+      queryBuilder()
+        .sql(sql)
+        .planMatcher()
+        .include("usedMetadataSummaryFile = false")
+        .include("DynamicPojoRecordReader")
+        .match();
 
       testBuilder()
-          .sqlQuery(sql)
-          .unOrdered()
-          .baselineColumns("star_count")
-          .baselineValues(250L)
-          .go();
+        .sqlQuery(sql)
+        .unOrdered()
+        .baselineColumns("star_count")
+        .baselineValues(250L)
+        .go();
 
     } finally {
-      test("drop table if exists %s", tableName);
+      run("drop table if exists %s", tableName);
     }
   }
 
   @Test
   public void testCountsForLeafDirectories() throws Exception {
-    test("use dfs.tmp");
+    run("use dfs.tmp");
     String tableName = "parquet_table_counts";
 
     try {
-      test("create table `%s/1` as select * from cp.`tpch/nation.parquet`", tableName);
-      test("create table `%s/2` as select * from cp.`tpch/nation.parquet`", tableName);
-      test("create table `%s/3` as select * from cp.`tpch/nation.parquet`", tableName);
-      test("refresh table metadata %s", tableName);
+      run("create table `%s/1` as select * from cp.`tpch/nation.parquet`", tableName);
+      run("create table `%s/2` as select * from cp.`tpch/nation.parquet`", tableName);
+      run("create table `%s/3` as select * from cp.`tpch/nation.parquet`", tableName);
+      run("refresh table metadata %s", tableName);
 
       String sql = String.format("select\n" +
-              "count(*) as star_count\n" +
-              "from `%s/1`", tableName);
-
-      int expectedNumFiles = 1;
-      String numFilesPattern = "numFiles = " + expectedNumFiles;
-      String usedMetaSummaryPattern = "usedMetadataSummaryFile = true";
-      String recordReaderPattern = "DynamicPojoRecordReader";
+        "count(*) as star_count\n" +
+        "from `%s/1`", tableName);
 
-      testPlanMatchingPatterns(sql, new String[]{numFilesPattern, usedMetaSummaryPattern, recordReaderPattern});
+      queryBuilder()
+        .sql(sql)
+        .planMatcher()
+        .include("numFiles = 1")
+        .include("usedMetadataSummaryFile = true")
+        .include("DynamicPojoRecordReader")
+        .match();
 
       testBuilder()
-          .sqlQuery(sql)
-          .unOrdered()
-          .baselineColumns("star_count")
-          .baselineValues(25L)
-          .go();
+        .sqlQuery(sql)
+        .unOrdered()
+        .baselineColumns("star_count")
+        .baselineValues(25L)
+        .go();
 
     } finally {
-      test("drop table if exists %s", tableName);
+      run("drop table if exists %s", tableName);
     }
   }
 
   @Test
   public void testCountsForDirWithFilesAndDir() throws Exception {
-    test("use dfs.tmp");
+    run("use dfs.tmp");
     String tableName = "parquet_table_counts";
 
     try {
-      test("create table `%s/1` as select * from cp.`tpch/nation.parquet`", tableName);
-      test("create table `%s/1/2` as select * from cp.`tpch/nation.parquet`", tableName);
-      test("create table `%s/1/3` as select * from cp.`tpch/nation.parquet`", tableName);
-      test("refresh table metadata %s", tableName);
+      run("create table `%s/1` as select * from cp.`tpch/nation.parquet`", tableName);
+      run("create table `%s/1/2` as select * from cp.`tpch/nation.parquet`", tableName);
+      run("create table `%s/1/3` as select * from cp.`tpch/nation.parquet`", tableName);
+      run("refresh table metadata %s", tableName);
 
       String sql = String.format("select count(*) as star_count from `%s/1`", tableName);
 
-      int expectedNumFiles = 1;
-      String numFilesPattern = "numFiles = " + expectedNumFiles;
-      String usedMetaSummaryPattern = "usedMetadataSummaryFile = true";
-      String recordReaderPattern = "DynamicPojoRecordReader";
-
-      testPlanMatchingPatterns(sql, new String[]{numFilesPattern, usedMetaSummaryPattern, recordReaderPattern});
+      queryBuilder()
+        .sql(sql)
+        .planMatcher()
+        .include("numFiles = 1")
+        .include("usedMetadataSummaryFile = true")
+        .include("DynamicPojoRecordReader")
+        .match();
 
       testBuilder()
-          .sqlQuery(sql)
-          .unOrdered()
-          .baselineColumns("star_count")
-          .baselineValues(75L)
-          .go();
+        .sqlQuery(sql)
+        .unOrdered()
+        .baselineColumns("star_count")
+        .baselineValues(75L)
+        .go();
 
     } finally {
-      test("drop table if exists %s", tableName);
+      run("drop table if exists %s", tableName);
     }
   }
 
   @Test
-  public void testCountsWithNonExColumn() throws Exception {
-    test("use dfs.tmp");
+  public void testCountsWithNonExistingColumn() throws Exception {
+    run("use dfs.tmp");
     String tableName = "parquet_table_counts_nonex";
 
     try {
-      test(String.format("create table `%s/1` as select * from cp.`parquet/alltypes_optional.parquet`", tableName));
-      test(String.format("create table `%s/2` as select * from cp.`parquet/alltypes_optional.parquet`", tableName));
-      test(String.format("create table `%s/3` as select * from cp.`parquet/alltypes_optional.parquet`", tableName));
-      test(String.format("create table `%s/4` as select * from cp.`parquet/alltypes_optional.parquet`", tableName));
+      run("create table `%s/1` as select * from cp.`parquet/alltypes_optional.parquet`", tableName);
+      run("create table `%s/2` as select * from cp.`parquet/alltypes_optional.parquet`", tableName);
+      run("create table `%s/3` as select * from cp.`parquet/alltypes_optional.parquet`", tableName);
+      run("create table `%s/4` as select * from cp.`parquet/alltypes_optional.parquet`", tableName);
 
-      test("refresh table metadata %s", tableName);
+      run("refresh table metadata %s", tableName);
 
       String sql = String.format("select\n" +
-              "count(*) as star_count,\n" +
-              "count(col_int) as int_column_count,\n" +
-              "count(col_vrchr) as vrchr_column_count,\n" +
-              "count(non_existent) as non_existent\n" +
-              "from %s", tableName);
-
-      String usedMetaSummaryPattern = "usedMetadataSummaryFile = true";
-      String recordReaderPattern = "DynamicPojoRecordReader";
-
-      testPlanMatchingPatterns(sql, new String[]{usedMetaSummaryPattern, recordReaderPattern});
+        "count(*) as star_count,\n" +
+        "count(col_int) as int_column_count,\n" +
+        "count(col_vrchr) as vrchr_column_count,\n" +
+        "count(non_existent) as non_existent\n" +
+        "from %s", tableName);
+
+      queryBuilder()
+        .sql(sql)
+        .planMatcher()
+        .include("numFiles = 1")
+        .include("usedMetadataSummaryFile = true")
+        .include("DynamicPojoRecordReader")
+        .match();
 
       testBuilder()
-              .sqlQuery(sql)
-              .unOrdered()
-              .baselineColumns("star_count", "int_column_count", "vrchr_column_count", "non_existent" )
-              .baselineValues(24L, 8L, 12L, 0L)
-              .go();
+        .sqlQuery(sql)
+        .unOrdered()
+        .baselineColumns("star_count", "int_column_count", "vrchr_column_count", "non_existent" )
+        .baselineValues(24L, 8L, 12L, 0L)
+        .go();
 
     } finally {
-      test("drop table if exists %s", tableName);
+      run("drop table if exists %s", tableName);
     }
   }
+
+  @Test
+  public void testSerDe() throws Exception {
+    String sql = "select count(*) as cnt from cp.`tpch/nation.parquet`";
+    String plan = queryBuilder().sql(sql).explainJson();
+    long cnt = queryBuilder().physical(plan).singletonLong();
+    assertEquals("Counts should match",25L, cnt);
 
 Review comment:
   Fixed.

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services