Posted to commits@flink.apache.org by le...@apache.org on 2022/11/14 09:13:28 UTC

[flink] branch release-1.16 updated: [FLINK-29992][hive] Fix Hive lookup join failure when column projection is pushed down to the Hive lookup table source

This is an automated email from the ASF dual-hosted git repository.

leonard pushed a commit to branch release-1.16
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.16 by this push:
     new c946b0b95b7 [FLINK-29992][hive] Fix Hive lookup join failure when column projection is pushed down to the Hive lookup table source
c946b0b95b7 is described below

commit c946b0b95b7b8396eac7f03019eb279becddd301
Author: yuxia Luo <lu...@alumni.sjtu.edu.cn>
AuthorDate: Mon Nov 14 17:13:19 2022 +0800

    [FLINK-29992][hive] Fix Hive lookup join failure when column projection is pushed down to the Hive lookup table source
    
    This closes #21310.
---
 .../flink/connectors/hive/HiveLookupTableSource.java  | 12 ++++++++++++
 .../apache/flink/connectors/hive/HiveTableSource.java |  6 +++---
 .../flink/connectors/hive/HiveLookupJoinITCase.java   | 19 +++++++++++++++++++
 3 files changed, 34 insertions(+), 3 deletions(-)
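
For context on the fix below: HiveLookupTableSource previously inherited HiveTableSource#copy(), which returns a plain HiveTableSource. The planner applies pushdown abilities such as column projection to a copy of the source, so after projection pushdown the lookup join was planned against a copy that no longer implemented LookupTableSource and failed. A minimal sketch of the contract the new override restores (illustrative, not planner code; assumes flink-table-common on the classpath):

    import org.apache.flink.table.connector.source.DynamicTableSource;
    import org.apache.flink.table.connector.source.LookupTableSource;

    final class CopyContractSketch {
        // Stand-in for the planner step that clones a source before applying
        // a pushdown ability: the clone must keep every ability of the
        // original, including the lookup ability.
        static DynamicTableSource copyForPushdown(DynamicTableSource source) {
            DynamicTableSource copy = source.copy();
            if (source instanceof LookupTableSource
                    && !(copy instanceof LookupTableSource)) {
                // The state Hive lookup joins ended up in before this commit.
                throw new IllegalStateException(
                        "copy() dropped the lookup ability: " + copy.asSummaryString());
            }
            return copy;
        }
    }

The override in the first hunk below satisfies this by returning a HiveLookupTableSource and carrying over all pushed-down state.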

diff --git a/flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/connectors/hive/HiveLookupTableSource.java b/flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/connectors/hive/HiveLookupTableSource.java
index e68072affe0..09faa0fcef5 100644
--- a/flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/connectors/hive/HiveLookupTableSource.java
+++ b/flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/connectors/hive/HiveLookupTableSource.java
@@ -30,6 +30,7 @@ import org.apache.flink.connectors.hive.util.JobConfUtils;
 import org.apache.flink.table.catalog.CatalogTable;
 import org.apache.flink.table.catalog.ObjectPath;
 import org.apache.flink.table.catalog.hive.client.HiveShim;
+import org.apache.flink.table.connector.source.DynamicTableSource;
 import org.apache.flink.table.connector.source.LookupTableSource;
 import org.apache.flink.table.connector.source.TableFunctionProvider;
 import org.apache.flink.table.data.RowData;
@@ -90,6 +91,17 @@ public class HiveLookupTableSource extends HiveTableSource implements LookupTabl
         return TableFunctionProvider.of(getLookupFunction(context.getKeys()));
     }
 
+    @Override
+    public DynamicTableSource copy() {
+        HiveLookupTableSource source =
+                new HiveLookupTableSource(jobConf, flinkConf, tablePath, catalogTable);
+        source.remainingPartitions = remainingPartitions;
+        source.projectedFields = projectedFields;
+        source.limit = limit;
+        source.dynamicFilterPartitionKeys = dynamicFilterPartitionKeys;
+        return source;
+    }
+
     @VisibleForTesting
     TableFunction<RowData> getLookupFunction(int[][] keys) {
         int[] keyIndices = new int[keys.length];
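
A note on the tail of the hunk above: LookupContext#getKeys() hands over one index path per equi-join key, and getLookupFunction() flattens those paths into top-level field positions before building the TableFunction. A sketch of that flattening with illustrative values (Hive lookup keys are top-level columns, so only the first element of each path is used):

    final class KeyFlatteningSketch {
        public static void main(String[] args) {
            // Each keys[i] is an index path into the produced row type.
            int[][] keys = {{0}, {2}};       // e.g. join keys are columns #0 and #2
            int[] keyIndices = new int[keys.length];
            for (int i = 0; i < keys.length; i++) {
                keyIndices[i] = keys[i][0];  // keep only the top-level field index
            }
            System.out.println(java.util.Arrays.toString(keyIndices)); // [0, 2]
        }
    }
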
diff --git a/flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/connectors/hive/HiveTableSource.java b/flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/connectors/hive/HiveTableSource.java
index edb752ab7e0..9cdccba20ba 100644
--- a/flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/connectors/hive/HiveTableSource.java
+++ b/flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/connectors/hive/HiveTableSource.java
@@ -108,10 +108,10 @@ public class HiveTableSource
 
     // Remaining partition specs after partition pruning is performed. Null if pruning is not pushed
     // down.
-    @Nullable private List<Map<String, String>> remainingPartitions = null;
-    @Nullable private List<String> dynamicFilterPartitionKeys = null;
+    @Nullable protected List<Map<String, String>> remainingPartitions = null;
+    @Nullable protected List<String> dynamicFilterPartitionKeys = null;
     protected int[] projectedFields;
-    private Long limit = null;
+    protected Long limit = null;
 
     public HiveTableSource(
             JobConf jobConf,
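
The visibility change above is what lets the new HiveLookupTableSource#copy() transfer this planning state; with private fields the assignments in the subclass would not compile. A minimal sketch of the pattern, with hypothetical class names:

    import javax.annotation.Nullable;

    import org.apache.flink.table.connector.source.DynamicTableSource;

    // Hypothetical base/derived pair mirroring the change: pushdown state
    // lives in the base class, so a derived copy() can only carry it over
    // when the base class exposes it (here via protected).
    class BaseSource implements DynamicTableSource {
        @Nullable protected Long limit; // was private in the original

        @Override
        public DynamicTableSource copy() {
            BaseSource copy = new BaseSource();
            copy.limit = limit;
            return copy;
        }

        @Override
        public String asSummaryString() {
            return "BaseSource";
        }
    }

    class DerivedLookupSource extends BaseSource {
        @Override
        public DynamicTableSource copy() {
            DerivedLookupSource copy = new DerivedLookupSource();
            copy.limit = limit; // compiles only because the field is visible
            return copy;
        }
    }
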
diff --git a/flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/connectors/hive/HiveLookupJoinITCase.java b/flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/connectors/hive/HiveLookupJoinITCase.java
index 5fd2a56269a..d1d96c9810c 100644
--- a/flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/connectors/hive/HiveLookupJoinITCase.java
+++ b/flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/connectors/hive/HiveLookupJoinITCase.java
@@ -357,6 +357,25 @@ public class HiveLookupJoinITCase {
                 .isEqualTo("[+I[1, a, 101, 2020, 08, 01], +I[2, b, 122, 2020, 08, 01]]");
     }
 
+    @Test
+    public void testLookupJoinWithLookUpSourceProjectPushDown() throws Exception {
+        TableEnvironment batchEnv = HiveTestUtils.createTableEnvInBatchMode(SqlDialect.HIVE);
+        batchEnv.registerCatalog(hiveCatalog.getName(), hiveCatalog);
+        batchEnv.useCatalog(hiveCatalog.getName());
+        batchEnv.executeSql(
+                        "insert overwrite bounded_table values (1,'a',10),(2,'a',21),(2,'b',22),(3,'c',33)")
+                .await();
+        tableEnv.getConfig().setSqlDialect(SqlDialect.DEFAULT);
+        TableImpl flinkTable =
+                (TableImpl)
+                        tableEnv.sqlQuery(
+                                "select b.x, b.y from "
+                                        + " default_catalog.default_database.probe as p "
+                                        + " join bounded_table for system_time as of p.p as b on p.x=b.x and p.y=b.y");
+        List<Row> results = CollectionUtil.iteratorToList(flinkTable.execute().collect());
+        assertThat(results.toString()).isEqualTo("[+I[1, a], +I[2, b], +I[3, c]]");
+    }
+
     @Test
     public void testLookupJoinTableWithColumnarStorage() throws Exception {
         // constructs test data, as the DEFAULT_SIZE of VectorizedColumnBatch is 2048, we should