You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@doris.apache.org by mo...@apache.org on 2022/05/17 03:30:29 UTC

[incubator-doris] branch master updated: [feature-wip](hudi) Step1: Support create hudi external table (#9559)

This is an automated email from the ASF dual-hosted git repository.

morningman pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-doris.git


The following commit(s) were added to refs/heads/master by this push:
     new 72e0042efb [feature-wip](hudi) Step1: Support create hudi external table (#9559)
72e0042efb is described below

commit 72e0042efb675debafb66cfed5f41ca990c88d92
Author: dujl <du...@gmail.com>
AuthorDate: Tue May 17 11:30:23 2022 +0800

    [feature-wip](hudi) Step1: Support create hudi external table (#9559)
    
    support create hudi table
    support show create table for hudi table
    
    ### Design
    1. create hudi table without schema (recommended)
    ```sql
        CREATE [EXTERNAL] TABLE table_name
        ENGINE = HUDI
        [COMMENT "comment"]
        PROPERTIES (
        "hudi.database" = "hudi_db_in_hive_metastore",
        "hudi.table" = "hudi_table_in_hive_metastore",
        "hudi.hive.metastore.uris" = "thrift://127.0.0.1:9083"
        );
    ```
    
    2. create hudi table with schema
    ```sql
        CREATE [EXTERNAL] TABLE table_name
        [(column_definition1[, column_definition2, ...])]
        ENGINE = HUDI
        [COMMENT "comment"]
        PROPERTIES (
        "hudi.database" = "hudi_db_in_hive_metastore",
        "hudi.table" = "hudi_table_in_hive_metastore",
        "hudi.hive.metastore.uris" = "thrift://127.0.0.1:9083"
        );
    ```
    When creating a hudi table with schema, the columns must exist in the corresponding table in hive metastore.
---
 docs/.vuepress/sidebar/en.js                       |   3 +-
 docs/.vuepress/sidebar/zh-CN.js                    |   3 +-
 .../external-table/hudi-external-table.md          | 137 +++++++++++++++++++++
 .../Create/CREATE-EXTERNAL-TABLE.md                |  42 ++++++-
 .../external-table/hudi-external-table.md          | 134 ++++++++++++++++++++
 .../Create/CREATE-EXTERNAL-TABLE.md                |  40 +++++-
 fe/fe-core/pom.xml                                 |  12 ++
 .../org/apache/doris/analysis/CreateTableStmt.java |   9 +-
 .../java/org/apache/doris/catalog/Catalog.java     |  53 ++++++++
 .../doris/catalog/HiveMetaStoreClientHelper.java   |  23 ++++
 .../main/java/org/apache/doris/catalog/Table.java  |   9 +-
 .../apache/doris/external/hudi/HudiProperty.java   |  29 +++++
 .../org/apache/doris/external/hudi/HudiTable.java  | 125 +++++++++++++++++++
 .../org/apache/doris/external/hudi/HudiUtils.java  | 128 +++++++++++++++++++
 .../java/org/apache/doris/qe/ShowExecutor.java     |   1 +
 .../apache/doris/analysis/CreateTableStmtTest.java |  17 +++
 fe/pom.xml                                         |  17 ++-
 gensrc/thrift/Descriptors.thrift                   |   7 ++
 gensrc/thrift/Types.thrift                         |   3 +-
 19 files changed, 782 insertions(+), 10 deletions(-)

diff --git a/docs/.vuepress/sidebar/en.js b/docs/.vuepress/sidebar/en.js
index 5ccf75082a..0517c63ef8 100644
--- a/docs/.vuepress/sidebar/en.js
+++ b/docs/.vuepress/sidebar/en.js
@@ -224,7 +224,8 @@ module.exports = [
           "doris-on-es",
           "odbc-of-doris",
           "hive-of-doris",
-          "iceberg-of-doris"
+          "iceberg-of-doris",
+          "hudi-external-table"
         ],
       },
       "audit-plugin",
diff --git a/docs/.vuepress/sidebar/zh-CN.js b/docs/.vuepress/sidebar/zh-CN.js
index 330c25db57..4421e26d4d 100644
--- a/docs/.vuepress/sidebar/zh-CN.js
+++ b/docs/.vuepress/sidebar/zh-CN.js
@@ -224,7 +224,8 @@ module.exports = [
           "doris-on-es",
           "odbc-of-doris",
           "hive-of-doris",
-          "iceberg-of-doris"
+          "iceberg-of-doris",
+          "hudi-external-table"
         ],
       },
       "audit-plugin",
diff --git a/docs/en/ecosystem/external-table/hudi-external-table.md b/docs/en/ecosystem/external-table/hudi-external-table.md
new file mode 100644
index 0000000000..20d8df7c97
--- /dev/null
+++ b/docs/en/ecosystem/external-table/hudi-external-table.md
@@ -0,0 +1,137 @@
+---
+{
+    "title": "Doris Hudi external table",
+    "language": "en"
+}
+---
+
+<!-- 
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
+# Hudi External Table of Doris
+
+Hudi External Table of Doris provides Doris with the ability to access hudi external tables directly, eliminating the need for cumbersome data import and leveraging Doris' own OLAP capabilities to solve hudi table data analysis problems.
+
+ 1. support hudi data sources for Doris
+ 2. Support joint query between Doris and hudi data source tables to perform more complex analysis operations
+
+This document introduces how to use this feature and the considerations.
+
+## Glossary
+
+### Noun in Doris
+
+* FE: Frontend, the front-end node of Doris, responsible for metadata management and request access
+* BE: Backend, the backend node of Doris, responsible for query execution and data storage
+
+## How to use
+
+### Create Hudi External Table 
+
+Hudi tables can be created in Doris with or without schema. You do not need to declare the column definitions of the table when creating an external table, Doris can resolve the column definitions of the table in hive metastore when querying the table.
+
+1. Create a separate external table to mount the Hudi table.  
+   The syntax can be viewed in `HELP CREATE TABLE`.
+
+    ```sql
+    -- Syntax
+    CREATE [EXTERNAL] TABLE table_name
+    [(column_definition1[, column_definition2, ...])]
+    ENGINE = HUDI
+    [COMMENT "comment"]
+    PROPERTIES (
+    "hudi.database" = "hudi_db_in_hive_metastore",
+    "hudi.table" = "hudi_table_in_hive_metastore",
+    "hudi.hive.metastore.uris" = "thrift://127.0.0.1:9083"
+    );
+
+
+    -- Example: Mount hudi_table_in_hive_metastore under hudi_db_in_hive_metastore in Hive MetaStore 
+    CREATE TABLE `t_hudi` 
+    ENGINE = HUDI
+    PROPERTIES (
+    "hudi.database" = "hudi_db_in_hive_metastore",
+    "hudi.table" = "hudi_table_in_hive_metastore",
+    "hudi.hive.metastore.uris" = "thrift://127.0.0.1:9083"
+    );
+    
+    -- Example: Mount hudi table with schema.
+    CREATE TABLE `t_hudi` (
+        `id` int NOT NULL COMMENT "id number",
+        `name` varchar(10) NOT NULL COMMENT "user name"
+    ) ENGINE = HUDI
+    PROPERTIES (
+    "hudi.database" = "hudi_db_in_hive_metastore",
+    "hudi.table" = "hudi_table_in_hive_metastore",
+    "hudi.hive.metastore.uris" = "thrift://127.0.0.1:9083"
+    );
+    ```
+
+
+#### Parameter Description
+- column_definition
+  -  When creating a hudi table without schema (recommended), Doris will resolve the columns from hive metastore at query time.
+  -  When creating a hudi table with schema, the columns must exist in the corresponding table in hive metastore.
+- ENGINE needs to be specified as HUDI
+- PROPERTIES property.
+    - `hudi.hive.metastore.uris`: Hive Metastore service address
+    - `hudi.database`: the name of the database to which Hudi is mounted
+    - `hudi.table`: the name of the table to which Hudi is mounted, not required when mounting Hudi database.
+
+### Show table structure
+
+Show table structure can be viewed by `HELP SHOW CREATE TABLE`.
+    
+
+
+## Data Type Matching
+
+The supported Hudi column types correspond to Doris in the following table.
+
+|  Hudi  | Doris  |             Description              |
+| :------: | :----: | :-------------------------------: |
+|   BOOLEAN  | BOOLEAN  |                         |
+|   INTEGER   |  INT  |                       |
+|   LONG | BIGINT |              |
+|   FLOAT   | FLOAT |  |
+|   DOUBLE  | DOUBLE |  |
+|   DATE  | DATE |  |
+|   TIMESTAMP   |  DATETIME  | Timestamp to Datetime with loss of precision |
+|   STRING   |  STRING  |                                   |
+|   UUID  | VARCHAR | Use VARCHAR instead | 
+|   DECIMAL  | DECIMAL |  |
+|   TIME  | - | not supported |
+|   FIXED  | - | not supported |
+|   BINARY  | - | not supported |
+|   STRUCT  | - | not supported |
+|   LIST  | - | not supported |
+|   MAP  | - | not supported |
+
+**Note:** 
+- The current default supported version of hudi is 0.10.0 and has not been tested in other versions. More versions will be supported in the future.
+
+
+### Query Usage
+
+Once you have finished building the hudi external table in Doris, it is no different from a normal Doris OLAP table except that you cannot use the data models in Doris (rollup, preaggregation, materialized views, etc.)
+
+```sql
+select * from t_hudi where k1 > 1000 and k3 = 'term' or k4 like '%doris';
+```
+
diff --git a/docs/en/sql-manual/sql-reference/Data-Definition-Statements/Create/CREATE-EXTERNAL-TABLE.md b/docs/en/sql-manual/sql-reference/Data-Definition-Statements/Create/CREATE-EXTERNAL-TABLE.md
index 3c5421310f..8041797963 100644
--- a/docs/en/sql-manual/sql-reference/Data-Definition-Statements/Create/CREATE-EXTERNAL-TABLE.md
+++ b/docs/en/sql-manual/sql-reference/Data-Definition-Statements/Create/CREATE-EXTERNAL-TABLE.md
@@ -34,7 +34,7 @@ CREATE EXTERNAL TABLE
 
 This statement is used to create an external table, see [CREATE TABLE](./CREATE-TABLE.md) for the specific syntax.
 
-Which type of external table is mainly identified by the ENGINE type, currently MYSQL, BROKER, HIVE, ICEBERG are optional
+Which type of external table is mainly identified by the ENGINE type, currently MYSQL, BROKER, HIVE, ICEBERG, HUDI are optional
 
 1. If it is mysql, you need to provide the following information in properties:
 
@@ -111,6 +111,20 @@ Which type of external table is mainly identified by the ENGINE type, currently
    hive.metastore.uris is the hive metastore service address;
    catalog.type defaults to HIVE_CATALOG. Currently only HIVE_CATALOG is supported, more Iceberg catalog types will be supported in the future.
 
+5. In case of hudi, you need to provide the following information in properties:
+
+   ```sql
+   PROPERTIES (
+   "hudi.database" = "hudi_db_in_hive_metastore",
+   "hudi.table" = "hudi_table_in_hive_metastore",
+   "hudi.hive.metastore.uris" = "thrift://127.0.0.1:9083"
+   )
+   ```
+
+   Where hudi.database is the corresponding database name in HiveMetaStore;
+   hudi.table is the corresponding table name in HiveMetaStore;
+   hive.metastore.uris is the hive metastore service address;
+
 ### Example
 
 1. Create a MYSQL external table
@@ -225,6 +239,32 @@ Which type of external table is mainly identified by the ENGINE type, currently
    );
    ````
 
+5. Create a Hudi external table
+
+   create hudi table without schema (recommended)
+   ```sql
+   CREATE TABLE example_db.t_hudi
+   ENGINE=HUDI
+   PROPERTIES (
+   "hudi.database" = "hudi_db_in_hive_metastore",
+   "hudi.table" = "hudi_table_in_hive_metastore",
+   "hudi.hive.metastore.uris" = "thrift://127.0.0.1:9083"
+   );
+   ```
+
+   create hudi table with schema
+   ```sql
+   CREATE TABLE example_db.t_hudi (
+      `id` int NOT NULL COMMENT "id number",
+      `name` varchar(10) NOT NULL COMMENT "user name"
+   )
+   ENGINE=HUDI
+   PROPERTIES (
+   "hudi.database" = "hudi_db_in_hive_metastore",
+   "hudi.table" = "hudi_table_in_hive_metastore",
+   "hudi.hive.metastore.uris" = "thrift://127.0.0.1:9083"
+   );
+   ```
 
 ### Keywords
 
diff --git a/docs/zh-CN/ecosystem/external-table/hudi-external-table.md b/docs/zh-CN/ecosystem/external-table/hudi-external-table.md
new file mode 100644
index 0000000000..f077e71456
--- /dev/null
+++ b/docs/zh-CN/ecosystem/external-table/hudi-external-table.md
@@ -0,0 +1,134 @@
+---
+{
+    "title": "Doris Hudi external table",
+    "language": "zh-CN"
+}
+---
+
+<!-- 
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
+# Hudi External Table of Doris
+
+Hudi External Table of Doris 提供了 Doris 直接访问 Hudi 外部表的能力,外部表省去了繁琐的数据导入工作,并借助 Doris 本身的 OLAP 的能力来解决 Hudi 表的数据分析问题:
+
+1. 支持 Hudi 数据源接入Doris
+2. 支持 Doris 与 Hive数据源Hudi中的表联合查询,进行更加复杂的分析操作
+
+本文档主要介绍该功能的使用方式和注意事项等。
+
+## 名词解释
+
+### Doris 相关
+
+* FE:Frontend,Doris 的前端节点,负责元数据管理和请求接入
+* BE:Backend,Doris 的后端节点,负责查询执行和数据存储
+
+## 使用方法
+
+### Doris 中创建 Hudi 的外表
+
+可以通过以下两种方式在 Doris 中创建 Hudi 外表。建外表时无需声明表的列定义,Doris 可以在查询时从HiveMetaStore中获取列信息。
+
+1. 创建一个单独的外表,用于挂载 Hudi 表。  
+   具体相关语法,可以通过 [CREATE TABLE](../../sql-manual/sql-reference/Data-Definition-Statements/Create/CREATE-TABLE.md) 查看。
+
+    ```sql
+    -- 语法
+    CREATE [EXTERNAL] TABLE table_name
+    [(column_definition1[, column_definition2, ...])]
+    ENGINE = HUDI
+    [COMMENT "comment"]
+    PROPERTIES (
+    "hudi.database" = "hudi_db_in_hive_metastore",
+    "hudi.table" = "hudi_table_in_hive_metastore",
+    "hudi.hive.metastore.uris" = "thrift://127.0.0.1:9083"
+    );
+
+
+    -- 例子:挂载 HiveMetaStore 中 hudi_db_in_hive_metastore 下的 hudi_table_in_hive_metastore,挂载时不指定schema。
+    CREATE TABLE `t_hudi` 
+    ENGINE = HUDI
+    PROPERTIES (
+    "hudi.database" = "hudi_db_in_hive_metastore",
+    "hudi.table" = "hudi_table_in_hive_metastore",
+    "hudi.hive.metastore.uris" = "thrift://127.0.0.1:9083"
+    );
+
+    -- 例子:挂载时指定schema
+    CREATE TABLE `t_hudi` (
+        `id` int NOT NULL COMMENT "id number",
+        `name` varchar(10) NOT NULL COMMENT "user name"
+    ) ENGINE = HUDI
+    PROPERTIES (
+    "hudi.database" = "hudi_db_in_hive_metastore",
+    "hudi.table" = "hudi_table_in_hive_metastore",
+    "hudi.hive.metastore.uris" = "thrift://127.0.0.1:9083"
+    );
+    ```
+
+
+#### 参数说明:
+
+- 外表列
+    - 可以不指定列名,这时查询时会从HiveMetaStore中获取列信息,推荐这种建表方式
+    - 指定列名时指定的列名要在 Hudi 表中存在
+- ENGINE 需要指定为 HUDI
+- PROPERTIES 属性:
+    - `hudi.hive.metastore.uris`:Hive Metastore 服务地址
+    - `hudi.database`:挂载 Hudi 对应的数据库名
+    - `hudi.table`:挂载 Hudi 对应的表名
+
+### 展示表结构
+
+展示表结构可以通过 [SHOW CREATE TABLE](../../sql-manual/sql-reference/Show-Statements/SHOW-CREATE-TABLE.md) 查看。
+
+## 类型匹配
+
+支持的 Hudi 列类型与 Doris 对应关系如下表:
+
+|  Hudi  | Doris  |             描述              |
+| :------: | :----: | :-------------------------------: |
+|   BOOLEAN  | BOOLEAN  |                         |
+|   INTEGER   |  INT  |                       |
+|   LONG | BIGINT |              |
+|   FLOAT   | FLOAT |  |
+|   DOUBLE  | DOUBLE |  |
+|   DATE  | DATE |  |
+|   TIMESTAMP   |  DATETIME  | Timestamp 转成 Datetime 会损失精度 |
+|   STRING   |  STRING  |                                   |
+|   UUID  | VARCHAR | 使用 VARCHAR 来代替 | 
+|   DECIMAL  | DECIMAL |  |
+|   TIME  | - | 不支持 |
+|   FIXED  | - | 不支持 |
+|   BINARY  | - | 不支持 |
+|   STRUCT  | - | 不支持 |
+|   LIST  | - | 不支持 |
+|   MAP  | - | 不支持 |
+
+**注意:**
+- 当前默认支持的 Hudi 版本为 0.10.0,未在其他版本进行测试。后续会支持更多版本。
+
+### 查询用法
+
+完成在 Doris 中建立 Hudi 外表后,除了无法使用 Doris 中的数据模型(rollup、预聚合、物化视图等)外,与普通的 Doris OLAP 表并无区别
+
+```sql
+select * from t_hudi where k1 > 1000 and k3 ='term' or k4 like '%doris';
+```
diff --git a/docs/zh-CN/sql-manual/sql-reference/Data-Definition-Statements/Create/CREATE-EXTERNAL-TABLE.md b/docs/zh-CN/sql-manual/sql-reference/Data-Definition-Statements/Create/CREATE-EXTERNAL-TABLE.md
index 4ecbaa858b..3f32a452ac 100644
--- a/docs/zh-CN/sql-manual/sql-reference/Data-Definition-Statements/Create/CREATE-EXTERNAL-TABLE.md
+++ b/docs/zh-CN/sql-manual/sql-reference/Data-Definition-Statements/Create/CREATE-EXTERNAL-TABLE.md
@@ -35,7 +35,7 @@ CREATE EXTERNAL TABLE
 此语句用来创建外部表,具体语法参阅 [CREATE TABLE](./CREATE-TABLE.md)。
 
 主要通过 ENGINE 类型来标识是哪种类型的外部表,目前可选 MYSQL、BROKER、HIVE、ICEBERG
-
+、HUDI
 1. 如果是 mysql,则需要在 properties 提供以下信息:
 
    ```sql
@@ -111,6 +111,18 @@ CREATE EXTERNAL TABLE
    hive.metastore.uris 是 hive metastore 服务地址; 
    catalog.type 默认为 HIVE_CATALOG。当前仅支持 HIVE_CATALOG,后续会支持更多 Iceberg catalog 类型。
 
+5. 如果是 hudi,则需要在 properties 中提供以下信息:
+
+   ```sql
+   PROPERTIES (
+   "hudi.database" = "hudi_db_in_hive_metastore",
+   "hudi.table" = "hudi_table_in_hive_metastore",
+   "hudi.hive.metastore.uris" = "thrift://127.0.0.1:9083"
+   )
+   ```
+
+   其中 hudi.database 是 hive 表对应的库名字,hudi.table 是 hive 表的名字,hive.metastore.uris 是 hive metastore 服务地址。
+
 ### Example
 
 1. 创建MYSQL外部表
@@ -225,6 +237,32 @@ CREATE EXTERNAL TABLE
    );
    ```
 
+5. 创建一个 Hudi 外表
+
+   创建时不指定schema(推荐)
+   ```sql
+   CREATE TABLE example_db.t_hudi
+   ENGINE=HUDI
+   PROPERTIES (
+   "hudi.database" = "hudi_db_in_hive_metastore",
+   "hudi.table" = "hudi_table_in_hive_metastore",
+   "hudi.hive.metastore.uris" = "thrift://127.0.0.1:9083"
+   );
+   ```
+
+   创建时指定schema
+   ```sql
+   CREATE TABLE example_db.t_hudi (
+      `id` int NOT NULL COMMENT "id number",
+      `name` varchar(10) NOT NULL COMMENT "user name"
+   )
+   ENGINE=HUDI
+   PROPERTIES (
+   "hudi.database" = "hudi_db_in_hive_metastore",
+   "hudi.table" = "hudi_table_in_hive_metastore",
+   "hudi.hive.metastore.uris" = "thrift://127.0.0.1:9083"
+   );
+   ```
 
 ### Keywords
 
diff --git a/fe/fe-core/pom.xml b/fe/fe-core/pom.xml
index 4ff84acc19..009cf9df90 100644
--- a/fe/fe-core/pom.xml
+++ b/fe/fe-core/pom.xml
@@ -617,6 +617,18 @@ under the License.
             <scope>provided</scope>
         </dependency>
 
+        <!-- https://mvnrepository.com/artifact/org.apache.hudi/hudi-common -->
+        <dependency>
+            <groupId>org.apache.hudi</groupId>
+            <artifactId>hudi-common</artifactId>
+        </dependency>
+
+        <!-- https://mvnrepository.com/artifact/org.apache.hudi/hudi-hadoop-mr -->
+        <dependency>
+            <groupId>org.apache.hudi</groupId>
+            <artifactId>hudi-hadoop-mr</artifactId>
+        </dependency>
+
         <dependency> 
             <groupId>org.mariadb.jdbc</groupId>
             <artifactId>mariadb-java-client</artifactId>
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateTableStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateTableStmt.java
index dde61afa78..5c2db118bb 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateTableStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateTableStmt.java
@@ -87,6 +87,7 @@ public class CreateTableStmt extends DdlStmt {
         engineNames.add("elasticsearch");
         engineNames.add("hive");
         engineNames.add("iceberg");
+        engineNames.add("hudi");
     }
 
     // for backup. set to -1 for normal use
@@ -166,7 +167,7 @@ public class CreateTableStmt extends DdlStmt {
         this.rollupAlterClauseList = rollupAlterClauseList == null ? new ArrayList<>() : rollupAlterClauseList;
     }
 
-    // This is for iceberg table, which has no column schema
+    // This is for iceberg/hudi table, which has no column schema
     public CreateTableStmt(boolean ifNotExists,
                            boolean isExternal,
                            TableName tableName,
@@ -358,7 +359,8 @@ public class CreateTableStmt extends DdlStmt {
         }
 
         // analyze column def
-        if (!engineName.equals("iceberg") && (columnDefs == null || columnDefs.isEmpty())) {
+        if (!(engineName.equals("iceberg") || engineName.equals("hudi"))
+                && (columnDefs == null || columnDefs.isEmpty())) {
             ErrorReport.reportAnalysisException(ErrorCode.ERR_TABLE_MUST_HAVE_COLUMNS);
         }
         // add a hidden column as delete flag for unique table
@@ -480,7 +482,8 @@ public class CreateTableStmt extends DdlStmt {
         }
 
         if (engineName.equals("mysql") || engineName.equals("odbc") || engineName.equals("broker")
-                || engineName.equals("elasticsearch") || engineName.equals("hive") || engineName.equals("iceberg")) {
+                || engineName.equals("elasticsearch") || engineName.equals("hive")
+                || engineName.equals("iceberg") || engineName.equals("hudi")) {
             if (!isExternal) {
                 // this is for compatibility
                 isExternal = true;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/Catalog.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/Catalog.java
index c75c636fef..69fe069258 100755
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/Catalog.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/Catalog.java
@@ -150,6 +150,9 @@ import org.apache.doris.deploy.impl.AmbariDeployManager;
 import org.apache.doris.deploy.impl.K8sDeployManager;
 import org.apache.doris.deploy.impl.LocalFileDeployManager;
 import org.apache.doris.external.elasticsearch.EsRepository;
+import org.apache.doris.external.hudi.HudiProperty;
+import org.apache.doris.external.hudi.HudiTable;
+import org.apache.doris.external.hudi.HudiUtils;
 import org.apache.doris.external.iceberg.IcebergCatalogMgr;
 import org.apache.doris.external.iceberg.IcebergTableCreationRecordMgr;
 import org.apache.doris.ha.BDBHA;
@@ -3076,6 +3079,9 @@ public class Catalog {
         } else if (engineName.equalsIgnoreCase("iceberg")) {
             IcebergCatalogMgr.createIcebergTable(db, stmt);
             return;
+        } else if (engineName.equalsIgnoreCase("hudi")) {
+            createHudiTable(db, stmt);
+            return;
         } else {
             ErrorReport.reportDdlException(ErrorCode.ERR_UNKNOWN_STORAGE_ENGINE, engineName);
         }
@@ -4105,6 +4111,44 @@ public class Catalog {
         LOG.info("successfully create table[{}-{}]", tableName, tableId);
     }
 
+    private void createHudiTable(Database db, CreateTableStmt stmt) throws DdlException {
+        String tableName = stmt.getTableName();
+        List<Column> columns = stmt.getColumns();
+        long tableId = getNextId();
+        HudiTable hudiTable = new HudiTable(tableId, tableName, columns, stmt.getProperties());
+        hudiTable.setComment(stmt.getComment());
+        // check hudi properties in create stmt.
+        HudiUtils.validateCreateTable(hudiTable);
+        // check hudi table whether exists in hive database
+        String metastoreUris = hudiTable.getTableProperties().get(HudiProperty.HUDI_HIVE_METASTORE_URIS);
+        HiveMetaStoreClient hiveMetaStoreClient = HiveMetaStoreClientHelper.getClient(metastoreUris);
+        if (!HiveMetaStoreClientHelper.tableExists(hiveMetaStoreClient,
+                hudiTable.getHmsDatabaseName(), hudiTable.getHmsTableName())) {
+            throw new DdlException(String.format("Table [%s] dose not exist in Hive Metastore.",
+                    hudiTable.getHmsTableIdentifer()));
+        }
+        org.apache.hadoop.hive.metastore.api.Table hiveTable = HiveMetaStoreClientHelper.getTable(
+                hudiTable.getHmsDatabaseName(),
+                hudiTable.getHmsTableName(),
+                metastoreUris);
+        if (!HudiUtils.isHudiTable(hiveTable)) {
+            throw new DdlException(String.format("Table [%s] is not a hudi table.", hudiTable.getHmsTableIdentifer()));
+        }
+        // after support snapshot query for mor, we should remove the check.
+        if (HudiUtils.isHudiRealtimeTable(hiveTable)) {
+            throw new DdlException(String.format("Can not support hudi realtime table.", hudiTable.getHmsTableName()));
+        }
+        // check table's schema when user specify the schema
+        if (!hudiTable.getFullSchema().isEmpty()) {
+            HudiUtils.validateColumns(hudiTable, hiveTable);
+        }
+        // check hive table if exists in doris database
+        if (!db.createTableWithLock(hudiTable, false, stmt.isSetIfNotExists()).first) {
+            ErrorReport.reportDdlException(ErrorCode.ERR_TABLE_EXISTS_ERROR, tableName);
+        }
+        LOG.info("successfully create table[{}-{}]", tableName, tableId);
+    }
+
     public static void getDdlStmt(Table table, List<String> createTableStmt, List<String> addPartitionStmt,
                                   List<String> createRollupStmt, boolean separatePartition, boolean hidePassword) {
         getDdlStmt(null, null, table, createTableStmt, addPartitionStmt, createRollupStmt, separatePartition, hidePassword);
@@ -4412,6 +4456,15 @@ public class Catalog {
             sb.append("\"iceberg.table\" = \"").append(icebergTable.getIcebergTbl()).append("\",\n");
             sb.append(new PrintableMap<>(icebergTable.getIcebergProperties(), " = ", true, true, false).toString());
             sb.append("\n)");
+        } else if (table.getType() == TableType.HUDI) {
+            HudiTable hudiTable = (HudiTable) table;
+            if (!Strings.isNullOrEmpty(table.getComment())) {
+                sb.append("\nCOMMENT \"").append(table.getComment(true)).append("\"");
+            }
+            // properties
+            sb.append("\nPROPERTIES (\n");
+            sb.append(new PrintableMap<>(hudiTable.getTableProperties(), " = ", true, true, false).toString());
+            sb.append("\n)");
         }
 
         createTableStmt.add(sb.toString());
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/HiveMetaStoreClientHelper.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/HiveMetaStoreClientHelper.java
index 3efc7a1c7e..53e27dfc6c 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/HiveMetaStoreClientHelper.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/HiveMetaStoreClientHelper.java
@@ -286,6 +286,29 @@ public class HiveMetaStoreClientHelper {
         return table;
     }
 
+    /**
+     * Get hive table with dbName and tableName.
+     *
+     * @param dbName database name
+     * @param tableName table name
+     * @param metaStoreUris hive metastore uris
+     * @return HiveTable
+     * @throws DdlException when get table from hive metastore failed.
+     */
+    public static Table getTable(String dbName, String tableName, String metaStoreUris) throws DdlException {
+        HiveMetaStoreClient client = getClient(metaStoreUris);
+        Table table;
+        try {
+            table = client.getTable(dbName, tableName);
+        } catch (TException e) {
+            LOG.warn("Hive metastore thrift exception: {}", e.getMessage());
+            throw new DdlException("Connect hive metastore failed. Error: " + e.getMessage());
+        } finally {
+            client.close();
+        }
+        return table;
+    }
+
     /**
      * Convert Doris expr to Hive expr, only for partition column
      * @param dorisExpr
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/Table.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/Table.java
index 838a6989f7..e7b2ed43a4 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/Table.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/Table.java
@@ -25,6 +25,7 @@ import org.apache.doris.common.io.Text;
 import org.apache.doris.common.io.Writable;
 import org.apache.doris.common.util.SqlUtils;
 import org.apache.doris.common.util.Util;
+import org.apache.doris.external.hudi.HudiTable;
 import org.apache.doris.thrift.TTableDescriptor;
 
 import com.google.common.base.Preconditions;
@@ -67,7 +68,8 @@ public class Table extends MetaObject implements Writable {
         BROKER,
         ELASTICSEARCH,
         HIVE,
-        ICEBERG
+        ICEBERG,
+        HUDI
     }
 
     protected long id;
@@ -340,6 +342,8 @@ public class Table extends MetaObject implements Writable {
             table = new HiveTable();
         } else if (type == TableType.ICEBERG) {
             table = new IcebergTable();
+        } else if (type == TableType.HUDI) {
+            table = new HudiTable();
         } else {
             throw new IOException("Unknown table type: " + type.name());
         }
@@ -432,6 +436,8 @@ public class Table extends MetaObject implements Writable {
                 return "ElasticSearch";
             case HIVE:
                 return "Hive";
+            case HUDI:
+                return "Hudi";
             default:
                 return null;
         }
@@ -451,6 +457,7 @@ public class Table extends MetaObject implements Writable {
             case BROKER:
             case ELASTICSEARCH:
             case HIVE:
+            case HUDI:
                 return "EXTERNAL TABLE";
             default:
                 return null;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/external/hudi/HudiProperty.java b/fe/fe-core/src/main/java/org/apache/doris/external/hudi/HudiProperty.java
new file mode 100644
index 0000000000..b209c0e711
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/external/hudi/HudiProperty.java
@@ -0,0 +1,29 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.doris.external.hudi;
+
+
+/**
+ * Hudi property contains information to connect a remote hudi db or table.
+ */
+public class HudiProperty {
+    public static final String HUDI_DATABASE = "hudi.database";
+    public static final String HUDI_TABLE = "hudi.table";
+    public static final String HUDI_HIVE_METASTORE_URIS = "hudi.hive.metastore.uris";
+    public static final String HUDI_CATALOG_TYPE = "hudi.catalog.type";
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/external/hudi/HudiTable.java b/fe/fe-core/src/main/java/org/apache/doris/external/hudi/HudiTable.java
new file mode 100644
index 0000000000..0e8e350eb3
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/external/hudi/HudiTable.java
@@ -0,0 +1,125 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.doris.external.hudi;
+
+import org.apache.doris.catalog.Column;
+import org.apache.doris.catalog.Table;
+import org.apache.doris.common.io.Text;
+import org.apache.doris.thrift.THudiTable;
+import org.apache.doris.thrift.TTableDescriptor;
+import org.apache.doris.thrift.TTableType;
+
+import com.google.common.collect.Maps;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * External Hudi table.
+ */
+public class HudiTable extends Table {
+    private static final Logger LOG = LogManager.getLogger(HudiTable.class);
+
+    // table properties of this hudi table
+    private Map<String, String> tableProperties = Maps.newHashMap();
+    // remote Hudi database name in hive metastore
+    private String hmsDatabaseName;
+    // remote Hudi table name in hive metastore
+    private String hmsTableName;
+
+    public HudiTable() {
+        super(TableType.HUDI);
+    }
+
+    /**
+     * Generate a Hudi Table with id, name, schema, properties.
+     *
+     * @param id table id
+     * @param tableName table name
+     * @param fullSchema table's schema
+     * @param tableProperties table's properties
+     */
+    public HudiTable(long id, String tableName, List<Column> fullSchema, Map<String, String>  tableProperties) {
+        super(id, tableName, TableType.HUDI, fullSchema);
+        this.tableProperties = tableProperties;
+        this.hmsDatabaseName = tableProperties.get(HudiProperty.HUDI_DATABASE);
+        this.hmsTableName = tableProperties.get(HudiProperty.HUDI_TABLE);
+    }
+
+    public String getHmsDatabaseName() {
+        return hmsDatabaseName;
+    }
+
+    public String getHmsTableName() {
+        return hmsTableName;
+    }
+
+    public Map<String, String> getTableProperties() {
+        return tableProperties;
+    }
+
+    public String getHmsTableIdentifer() {
+        return String.format("%s.%s", hmsDatabaseName, hmsTableName);
+    }
+
+    @Override
+    public void write(DataOutput out) throws IOException {
+        super.write(out);
+
+        Text.writeString(out, hmsDatabaseName);
+        Text.writeString(out, hmsTableName);
+
+        out.writeInt(tableProperties.size());
+        for (Map.Entry<String, String> entry : tableProperties.entrySet()) {
+            Text.writeString(out, entry.getKey());
+            Text.writeString(out, entry.getValue());
+        }
+    }
+
+    @Override
+    public void readFields(DataInput in) throws IOException {
+        super.readFields(in);
+
+        hmsDatabaseName = Text.readString(in);
+        hmsTableName = Text.readString(in);
+        int size = in.readInt();
+        for (int i = 0; i < size; i++) {
+            String key = Text.readString(in);
+            String value = Text.readString(in);
+            tableProperties.put(key, value);
+        }
+    }
+
+    @Override
+    public TTableDescriptor toThrift() {
+        THudiTable thriftHudiTable = new THudiTable();
+        thriftHudiTable.setDbName(getHmsDatabaseName());
+        thriftHudiTable.setTableName(getHmsTableName());
+        thriftHudiTable.setProperties(getTableProperties());
+
+        TTableDescriptor thriftTableDescriptor = new TTableDescriptor(getId(), TTableType.HUDI_TABLE,
+                fullSchema.size(), 0, getName(), "");
+        thriftTableDescriptor.setHudiTable(thriftHudiTable);
+        return thriftTableDescriptor;
+    }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/external/hudi/HudiUtils.java b/fe/fe-core/src/main/java/org/apache/doris/external/hudi/HudiUtils.java
new file mode 100644
index 0000000000..b1a65ad41d
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/external/hudi/HudiUtils.java
@@ -0,0 +1,128 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.doris.external.hudi;
+
+import org.apache.doris.common.DdlException;
+
+import com.google.common.base.Strings;
+import com.google.common.collect.Maps;
+import org.apache.hudi.hadoop.HoodieParquetInputFormat;
+import org.apache.hudi.hadoop.realtime.HoodieParquetRealtimeInputFormat;
+
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+/**
+ * Hudi utils.
+ */
+public class HudiUtils {
+
+    private static final String PROPERTY_MISSING_MSG =
+            "Hudi table %s is null. Please add properties('%s'='xxx') when create table";
+
+    /**
+     * Validate the required properties of a Hudi table at creation time.
+     */
+    public static void validateCreateTable(HudiTable table) throws DdlException {
+
+        if (table.getTableProperties() == null) {
+            throw new DdlException("Please set properties of hudi table, "
+                    + "they are: database, table and 'hive.metastore.uris'");
+        }
+
+        Map<String, String> copiedProps = Maps.newHashMap(table.getTableProperties());
+        String hiveDb = copiedProps.get(HudiProperty.HUDI_DATABASE);
+        if (Strings.isNullOrEmpty(hiveDb)) {
+            throw new DdlException(String.format(PROPERTY_MISSING_MSG,
+                    HudiProperty.HUDI_DATABASE, HudiProperty.HUDI_DATABASE));
+        }
+        copiedProps.remove(HudiProperty.HUDI_DATABASE);
+
+        String hiveTable = copiedProps.get(HudiProperty.HUDI_TABLE);
+        if (Strings.isNullOrEmpty(hiveTable)) {
+            throw new DdlException(String.format(PROPERTY_MISSING_MSG,
+                    HudiProperty.HUDI_TABLE, HudiProperty.HUDI_TABLE));
+        }
+        copiedProps.remove(HudiProperty.HUDI_TABLE);
+
+        // check hive properties
+        // hive.metastore.uris
+        String hiveMetastoreUris = copiedProps.get(HudiProperty.HUDI_HIVE_METASTORE_URIS);
+        if (Strings.isNullOrEmpty(hiveMetastoreUris)) {
+            throw new DdlException(String.format(PROPERTY_MISSING_MSG,
+                    HudiProperty.HUDI_HIVE_METASTORE_URIS, HudiProperty.HUDI_HIVE_METASTORE_URIS));
+        }
+        copiedProps.remove(HudiProperty.HUDI_HIVE_METASTORE_URIS);
+
+        if (!copiedProps.isEmpty()) {
+            throw new DdlException("Unknown table properties: " + copiedProps.toString());
+        }
+    }
+
+    /**
+     * Check whether the given Hive metastore table is a Hudi table.
+     *
+     * @param hiveTable hive metastore table
+     * @return true if hiveTable is a Hudi table, false otherwise
+     */
+    public static boolean isHudiTable(org.apache.hadoop.hive.metastore.api.Table hiveTable) {
+        String inputFormat = hiveTable.getSd().getInputFormat();
+        if (HoodieParquetInputFormat.class.getName().equals(inputFormat)
+                || HoodieParquetRealtimeInputFormat.class.getName().equals(inputFormat)) {
+            return true;
+        }
+        return false;
+    }
+
+    /**
+     * Check whether the given Hive metastore table is a Hudi realtime table.
+     *
+     * @param hiveTable hive metastore table
+     * @return true if the table is a Hudi realtime table, false otherwise
+     */
+    public static boolean isHudiRealtimeTable(org.apache.hadoop.hive.metastore.api.Table hiveTable) {
+        String inputFormat = hiveTable.getSd().getInputFormat();
+        if (HoodieParquetRealtimeInputFormat.class.getName().equals(inputFormat)) {
+            return true;
+        }
+        return false;
+    }
+
+    /**
+     * Check that every column declared in the Hudi table exists in the
+     * corresponding Hive metastore table.
+     *
+     * @param table hudi table to be checked
+     * @param hiveTable the corresponding hive table
+     * @throws DdlException when a column of the Hudi table does not exist in the hive table
+     */
+    public static void validateColumns(HudiTable table,
+                                       org.apache.hadoop.hive.metastore.api.Table hiveTable) throws DdlException {
+        Set<String> hudiColumnNames = table.getFullSchema().stream()
+                .map(x -> x.getName()).collect(Collectors.toSet());
+
+        Set<String> hiveTableColumnNames = hiveTable.getSd().getCols()
+                .stream().map(x -> x.getName()).collect(Collectors.toSet());
+        hudiColumnNames.removeAll(hiveTableColumnNames);
+        if (hudiColumnNames.size() > 0) {
+            throw new DdlException(String.format("Hudi table's column(s): {%s} didn't exist in hive table. ",
+                    String.join(", ", hudiColumnNames)));
+        }
+    }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/ShowExecutor.java b/fe/fe-core/src/main/java/org/apache/doris/qe/ShowExecutor.java
index f7f3d9815e..56d6dea663 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/qe/ShowExecutor.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/qe/ShowExecutor.java
@@ -398,6 +398,7 @@ public class ShowExecutor {
         rowSet.add(Lists.newArrayList("HIVE", "YES", "HIVE database which data is in it", "NO", "NO", "NO"));
         rowSet.add(Lists.newArrayList("ICEBERG", "YES", "ICEBERG data lake which data is in it", "NO", "NO", "NO"));
         rowSet.add(Lists.newArrayList("ODBC", "YES", "ODBC driver which data we can connect", "NO", "NO", "NO"));
+        rowSet.add(Lists.newArrayList("HUDI", "YES", "HUDI data lake which data is in it", "NO", "NO", "NO"));
 
         // Only success
         resultSet = new ShowResultSet(showStmt.getMetaData(), rowSet);
diff --git a/fe/fe-core/src/test/java/org/apache/doris/analysis/CreateTableStmtTest.java b/fe/fe-core/src/test/java/org/apache/doris/analysis/CreateTableStmtTest.java
index f59ad88f2f..a9ce32b84e 100644
--- a/fe/fe-core/src/test/java/org/apache/doris/analysis/CreateTableStmtTest.java
+++ b/fe/fe-core/src/test/java/org/apache/doris/analysis/CreateTableStmtTest.java
@@ -274,4 +274,21 @@ public class CreateTableStmtTest {
                 "\"iceberg.hive.metastore.uris\"  =  \"thrift://127.0.0.1:9087\",\n" +
                 "\"iceberg.table\"  =  \"test\")", stmt.toString());
     }
+
+    @Test
+    public void testCreateHudiTable() throws UserException {
+        Map<String, String> properties = new HashMap<>();
+        properties.put("hudi.database", "doris");
+        properties.put("hudi.table", "test");
+        properties.put("hudi.hive.metastore.uris", "thrift://127.0.0.1:9087");
+        CreateTableStmt stmt = new CreateTableStmt(false, true, tblName, "hudi", properties, "");
+        stmt.analyze(analyzer);
+
+        Assert.assertEquals("CREATE EXTERNAL TABLE `testCluster:db1`.`table1` (\n"
+                + "\n"
+                + ") ENGINE = hudi\n"
+                + "PROPERTIES (\"hudi.database\"  =  \"doris\",\n"
+                + "\"hudi.hive.metastore.uris\"  =  \"thrift://127.0.0.1:9087\",\n"
+                + "\"hudi.table\"  =  \"test\")", stmt.toString());
+    }
 }
diff --git a/fe/pom.xml b/fe/pom.xml
index 11fee07c94..40406208cf 100644
--- a/fe/pom.xml
+++ b/fe/pom.xml
@@ -249,7 +249,8 @@ under the License.
          you can find avro version info in iceberg mvn repository -->
         <iceberg.version>0.12.0</iceberg.version>
         <avro.version>1.10.1</avro.version>
-        <!-- ATTN: avro version must be consistent with Iceberg version -->
+        <!-- hudi -->
+        <hudi.version>0.10.0</hudi.version>
         <parquet.version>1.10.1</parquet.version>
         <commons-collections.version>3.2.2</commons-collections.version>
         <scala.version>2.12.10</scala.version>
@@ -852,6 +853,20 @@ under the License.
                 <version>${avro.version}</version>
             </dependency>
 
+            <!-- https://mvnrepository.com/artifact/org.apache.hudi/hudi-common -->
+            <dependency>
+                <groupId>org.apache.hudi</groupId>
+                <artifactId>hudi-common</artifactId>
+                <version>${hudi.version}</version>
+            </dependency>
+
+            <!-- https://mvnrepository.com/artifact/org.apache.hudi/hudi-hadoop-mr -->
+            <dependency>
+                <groupId>org.apache.hudi</groupId>
+                <artifactId>hudi-hadoop-mr</artifactId>
+                <version>${hudi.version}</version>
+            </dependency>
+
             <dependency>
                 <groupId>org.apache.parquet</groupId>
                 <artifactId>parquet-column</artifactId>
diff --git a/gensrc/thrift/Descriptors.thrift b/gensrc/thrift/Descriptors.thrift
index 7aebce70b6..187df7e0b3 100644
--- a/gensrc/thrift/Descriptors.thrift
+++ b/gensrc/thrift/Descriptors.thrift
@@ -250,6 +250,12 @@ struct TIcebergTable {
   3: required map<string, string> properties
 }
 
+struct THudiTable {
+  1: optional string dbName
+  2: optional string tableName
+  3: optional map<string, string> properties
+}
+
 // "Union" of all table types.
 struct TTableDescriptor {
   1: required Types.TTableId id
@@ -270,6 +276,7 @@ struct TTableDescriptor {
   16: optional TOdbcTable odbcTable
   17: optional THiveTable hiveTable
   18: optional TIcebergTable icebergTable
+  19: optional THudiTable hudiTable
 }
 
 struct TDescriptorTable {
diff --git a/gensrc/thrift/Types.thrift b/gensrc/thrift/Types.thrift
index ab6ce688fa..d75376443f 100644
--- a/gensrc/thrift/Types.thrift
+++ b/gensrc/thrift/Types.thrift
@@ -498,7 +498,8 @@ enum TTableType {
     ES_TABLE,
     ODBC_TABLE,
     HIVE_TABLE,
-    ICEBERG_TABLE
+    ICEBERG_TABLE,
+    HUDI_TABLE
 }
 
 enum TOdbcTableType {


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@doris.apache.org
For additional commands, e-mail: commits-help@doris.apache.org