Posted to commits@flink.apache.org by lz...@apache.org on 2020/08/10 03:44:38 UTC

[flink] branch release-1.11 updated: [FLINK-18678][hive][doc] Update doc about setting hive version

This is an automated email from the ASF dual-hosted git repository.

lzljs3620320 pushed a commit to branch release-1.11
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.11 by this push:
     new c0aaa2d  [FLINK-18678][hive][doc] Update doc about setting hive version
c0aaa2d is described below

commit c0aaa2d0e9f3035820ff9e4a5c565bde4a75b42c
Author: Rui Li <li...@apache.org>
AuthorDate: Mon Aug 10 11:38:36 2020 +0800

    [FLINK-18678][hive][doc] Update doc about setting hive version
    
    This closes #12988
---
 docs/dev/table/catalogs.md      |  8 ++++----
 docs/dev/table/catalogs.zh.md   |  8 ++++----
 docs/dev/table/hive/index.md    | 15 ++++++---------
 docs/dev/table/hive/index.zh.md | 14 +++++---------
 4 files changed, 19 insertions(+), 26 deletions(-)

diff --git a/docs/dev/table/catalogs.md b/docs/dev/table/catalogs.md
index 1f1d9e9..836bf72 100644
--- a/docs/dev/table/catalogs.md
+++ b/docs/dev/table/catalogs.md
@@ -74,7 +74,7 @@ Users can use SQL DDL to create tables in catalogs in both Table API and SQL.
 TableEnvironment tableEnv = ...
 
 // Create a HiveCatalog 
-Catalog catalog = new HiveCatalog("myhive", null, "<path_of_hive_conf>", "<hive_version>");
+Catalog catalog = new HiveCatalog("myhive", null, "<path_of_hive_conf>");
 
 // Register the catalog
 tableEnv.registerCatalog("myhive", catalog);
@@ -94,7 +94,7 @@ tableEnv.listTables(); // should return the tables in current catalog and databa
 val tableEnv = ...
 
 // Create a HiveCatalog 
-val catalog = new HiveCatalog("myhive", null, "<path_of_hive_conf>", "<hive_version>")
+val catalog = new HiveCatalog("myhive", null, "<path_of_hive_conf>")
 
 // Register the catalog
 tableEnv.registerCatalog("myhive", catalog)
@@ -161,7 +161,7 @@ import org.apache.flink.table.descriptors.Kafka;
 TableEnvironment tableEnv = TableEnvironment.create(EnvironmentSettings.newInstance().build());
 
 // Create a HiveCatalog 
-Catalog catalog = new HiveCatalog("myhive", null, "<path_of_hive_conf>", "<hive_version>");
+Catalog catalog = new HiveCatalog("myhive", null, "<path_of_hive_conf>");
 
 // Register the catalog
 tableEnv.registerCatalog("myhive", catalog);
@@ -203,7 +203,7 @@ import org.apache.flink.table.descriptors.Kafka
 val tableEnv = TableEnvironment.create(EnvironmentSettings.newInstance.build)
 
 // Create a HiveCatalog 
-val catalog = new HiveCatalog("myhive", null, "<path_of_hive_conf>", "<hive_version>")
+val catalog = new HiveCatalog("myhive", null, "<path_of_hive_conf>")
 
 // Register the catalog
 tableEnv.registerCatalog("myhive", catalog)
diff --git a/docs/dev/table/catalogs.zh.md b/docs/dev/table/catalogs.zh.md
index 26c664a..6711d96 100644
--- a/docs/dev/table/catalogs.zh.md
+++ b/docs/dev/table/catalogs.zh.md
@@ -70,7 +70,7 @@ Catalog 是可扩展的,用户可以通过实现 `Catalog` 接口来开发自
 TableEnvironment tableEnv = ...
 
 // Create a HiveCatalog 
-Catalog catalog = new HiveCatalog("myhive", null, "<path_of_hive_conf>", "<hive_version>");
+Catalog catalog = new HiveCatalog("myhive", null, "<path_of_hive_conf>");
 
 // Register the catalog
 tableEnv.registerCatalog("myhive", catalog);
@@ -90,7 +90,7 @@ tableEnv.listTables(); // should return the tables in current catalog and databa
 val tableEnv = ...
 
 // Create a HiveCatalog 
-val catalog = new HiveCatalog("myhive", null, "<path_of_hive_conf>", "<hive_version>");
+val catalog = new HiveCatalog("myhive", null, "<path_of_hive_conf>");
 
 // Register the catalog
 tableEnv.registerCatalog("myhive", catalog);
@@ -157,7 +157,7 @@ import org.apache.flink.table.descriptors.Kafka;
 TableEnvironment tableEnv = TableEnvironment.create(EnvironmentSettings.newInstance().build());
 
 // Create a HiveCatalog
-Catalog catalog = new HiveCatalog("myhive", null, "<path_of_hive_conf>", "<hive_version>");
+Catalog catalog = new HiveCatalog("myhive", null, "<path_of_hive_conf>");
 
 // Register the catalog
 tableEnv.registerCatalog("myhive", catalog);
@@ -199,7 +199,7 @@ import org.apache.flink.table.descriptors.Kafka
 val tableEnv = TableEnvironment.create(EnvironmentSettings.newInstance.build)
 
 // Create a HiveCatalog
-val catalog = new HiveCatalog("myhive", null, "<path_of_hive_conf>", "<hive_version>")
+val catalog = new HiveCatalog("myhive", null, "<path_of_hive_conf>")
 
 // Register the catalog
 tableEnv.registerCatalog("myhive", catalog)
diff --git a/docs/dev/table/hive/index.md b/docs/dev/table/hive/index.md
index 4d4af17..7778982 100644
--- a/docs/dev/table/hive/index.md
+++ b/docs/dev/table/hive/index.md
@@ -251,10 +251,6 @@ Please find the required dependencies for different Hive major versions below.
 </div>
 </div>
 
-If you use the hive version of HDP or CDH, you need to refer to the dependency in the previous section and select a similar version.
-
-And you need to specify selected and supported "hive-version" in yaml, HiveCatalog and HiveModule.
-
 ### Program maven
 
 If you are building your own program, you need the following dependencies in your mvn file.
@@ -297,9 +293,12 @@ the hive configuration file to their local environment first.
 Please note while HiveCatalog doesn't require a particular planner, reading/writing Hive tables only works with blink planner.
 Therefore it's highly recommended that you use blink planner when connecting to your Hive warehouse.
 
+`HiveCatalog` is capable of automatically detecting the Hive version in use. It's recommended **NOT** to specify the Hive
+version, unless the automatic detection fails.
+
 <div class="codetabs" markdown="1">
 <div data-lang="Java" markdown="1">
-Take Hive version 2.3.4 for example:
+The following is an example of how to connect to Hive:
 
 {% highlight java %}
 
@@ -309,9 +308,8 @@ TableEnvironment tableEnv = TableEnvironment.create(settings);
 String name            = "myhive";
 String defaultDatabase = "mydatabase";
 String hiveConfDir     = "/opt/hive-conf"; // a local path
-String version         = "2.3.4";
 
-HiveCatalog hive = new HiveCatalog(name, defaultDatabase, hiveConfDir, version);
+HiveCatalog hive = new HiveCatalog(name, defaultDatabase, hiveConfDir);
 tableEnv.registerCatalog("myhive", hive);
 
 // set the HiveCatalog as the current catalog of the session
@@ -329,9 +327,8 @@ val tableEnv = TableEnvironment.create(settings)
 val name            = "myhive"
 val defaultDatabase = "mydatabase"
 val hiveConfDir     = "/opt/hive-conf" // a local path
-val version         = "2.3.4"
 
-val hive = new HiveCatalog(name, defaultDatabase, hiveConfDir, version)
+val hive = new HiveCatalog(name, defaultDatabase, hiveConfDir)
 tableEnv.registerCatalog("myhive", hive)
 
 // set the HiveCatalog as the current catalog of the session
diff --git a/docs/dev/table/hive/index.zh.md b/docs/dev/table/hive/index.zh.md
index 8b13f57..c7dc4c6 100644
--- a/docs/dev/table/hive/index.zh.md
+++ b/docs/dev/table/hive/index.zh.md
@@ -250,10 +250,6 @@ Apache Hive 是基于 Hadoop 之上构建的, 首先您需要 Hadoop 的依赖
 </div>
 </div>
 
-如果使用 Hive 的 HDP 或 CDH 版本,则需要参考上一节中的依赖项并选择一个类似的版本。
-
-并且您需要在定义 yaml 文件,或者创建 HiveCatalog 和 HiveModule 时,指定一个支持的 “hive-version”。
-
 ### Maven 依赖
 
 如果您在构建自己的应用程序,则需要在 mvn 文件中添加以下依赖项。
@@ -292,9 +288,11 @@ Apache Hive 是基于 Hadoop 之上构建的, 首先您需要 Hadoop 的依赖
 
 请注意,虽然 HiveCatalog 不需要特定的 planner,但读写Hive表仅适用于 Blink planner。因此,强烈建议您在连接到 Hive 仓库时使用 Blink planner。
 
+`HiveCatalog` 能够自动检测使用的 Hive 版本。我们建议**不要**手动设置 Hive 版本,除非自动检测机制失败。
+
 <div class="codetabs" markdown="1">
 <div data-lang="Java" markdown="1">
-以Hive 2.3.4版本为例:
+以下是如何连接到 Hive 的示例:
 
 {% highlight java %}
 
@@ -304,9 +302,8 @@ TableEnvironment tableEnv = TableEnvironment.create(settings);
 String name            = "myhive";
 String defaultDatabase = "mydatabase";
 String hiveConfDir     = "/opt/hive-conf"; // a local path
-String version         = "2.3.4";
 
-HiveCatalog hive = new HiveCatalog(name, defaultDatabase, hiveConfDir, version);
+HiveCatalog hive = new HiveCatalog(name, defaultDatabase, hiveConfDir);
 tableEnv.registerCatalog("myhive", hive);
 
 // set the HiveCatalog as the current catalog of the session
@@ -324,9 +321,8 @@ val tableEnv = TableEnvironment.create(settings)
 val name            = "myhive"
 val defaultDatabase = "mydatabase"
 val hiveConfDir     = "/opt/hive-conf" // a local path
-val version         = "2.3.4"
 
-val hive = new HiveCatalog(name, defaultDatabase, hiveConfDir, version)
+val hive = new HiveCatalog(name, defaultDatabase, hiveConfDir)
 tableEnv.registerCatalog("myhive", hive)
 
 // set the HiveCatalog as the current catalog of the session
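
For reference, here is a minimal sketch of the usage this change recommends, assembled from the updated snippets above. The catalog name, database, and conf path are the docs' own placeholders; the commented-out four-argument constructor is the pre-existing overload (visible in the removed lines) that stays available if automatic version detection fails.

{% highlight java %}
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.catalog.hive.HiveCatalog;

EnvironmentSettings settings = EnvironmentSettings.newInstance().useBlinkPlanner().inBatchMode().build();
TableEnvironment tableEnv = TableEnvironment.create(settings);

String name            = "myhive";
String defaultDatabase = "mydatabase";
String hiveConfDir     = "/opt/hive-conf"; // a local path

// Recommended: omit the version and let HiveCatalog detect it automatically.
HiveCatalog hive = new HiveCatalog(name, defaultDatabase, hiveConfDir);

// Fallback, only if automatic detection fails: pass the version explicitly,
// e.g. new HiveCatalog(name, defaultDatabase, hiveConfDir, "2.3.4");

tableEnv.registerCatalog("myhive", hive);

// set the HiveCatalog as the current catalog of the session
tableEnv.useCatalog("myhive");
{% endhighlight %}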