Posted to commits@hudi.apache.org by le...@apache.org on 2022/06/03 09:16:54 UTC

[hudi] branch master updated: [HUDI-4183] Fix using HoodieCatalog to create non-hudi tables (#5743)

This is an automated email from the ASF dual-hosted git repository.

leesf pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git


The following commit(s) were added to refs/heads/master by this push:
     new 3759a38b99 [HUDI-4183] Fix using HoodieCatalog to create non-hudi tables (#5743)
3759a38b99 is described below

commit 3759a38b99cf9bb7540cd1881879cc0547a25e70
Author: leesf <49...@qq.com>
AuthorDate: Fri Jun 3 17:16:48 2022 +0800

    [HUDI-4183] Fix using HoodieCatalog to create non-hudi tables (#5743)
---
 .../apache/spark/sql/hudi/TestCreateTable.scala    | 31 ++++++++++++++++++++++
 .../spark/sql/hudi/catalog/HoodieCatalog.scala     | 10 ++++---
 2 files changed, 38 insertions(+), 3 deletions(-)

diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestCreateTable.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestCreateTable.scala
index cad30eca24..7091de4a8e 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestCreateTable.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestCreateTable.scala
@@ -781,4 +781,35 @@ class TestCreateTable extends HoodieSparkSqlTestBase {
     val tablePath = s"${dbPath}/${tableName}"
     assertResult(false)(existsPath(tablePath))
   }
+
+  test("Test Create Non-Hudi Table(Parquet Table)") {
+    val databaseName = "test_database"
+    spark.sql(s"create database if not exists $databaseName")
+    spark.sql(s"use $databaseName")
+
+    val tableName = generateTableName
+    // Create a managed table
+    spark.sql(
+      s"""
+         | create table $tableName (
+         |  id int,
+         |  name string,
+         |  price double,
+         |  ts long
+         | ) using parquet
+       """.stripMargin)
+    val table = spark.sessionState.catalog.getTableMetadata(TableIdentifier(tableName))
+    assertResult(tableName)(table.identifier.table)
+    assertResult("parquet")(table.provider.get)
+    assertResult(CatalogTableType.MANAGED)(table.tableType)
+    assertResult(
+      Seq(
+        StructField("id", IntegerType),
+        StructField("name", StringType),
+        StructField("price", DoubleType),
+        StructField("ts", LongType))
+    )(table.schema.fields)
+
+    spark.sql("use default")
+  }
 }
diff --git a/hudi-spark-datasource/hudi-spark3/src/main/scala/org/apache/spark/sql/hudi/catalog/HoodieCatalog.scala b/hudi-spark-datasource/hudi-spark3/src/main/scala/org/apache/spark/sql/hudi/catalog/HoodieCatalog.scala
index 67012c7723..e1c2f228fa 100644
--- a/hudi-spark-datasource/hudi-spark3/src/main/scala/org/apache/spark/sql/hudi/catalog/HoodieCatalog.scala
+++ b/hudi-spark-datasource/hudi-spark3/src/main/scala/org/apache/spark/sql/hudi/catalog/HoodieCatalog.scala
@@ -118,9 +118,13 @@ class HoodieCatalog extends DelegatingCatalogExtension
                            schema: StructType,
                            partitions: Array[Transform],
                            properties: util.Map[String, String]): Table = {
-    val locUriAndTableType = deduceTableLocationURIAndTableType(ident, properties)
-    createHoodieTable(ident, schema, locUriAndTableType, partitions, properties,
-      Map.empty, Option.empty, TableCreationMode.CREATE)
+    if (sparkAdapter.isHoodieTable(properties)) {
+      val locUriAndTableType = deduceTableLocationURIAndTableType(ident, properties)
+      createHoodieTable(ident, schema, locUriAndTableType, partitions, properties,
+        Map.empty, Option.empty, TableCreationMode.CREATE)
+    } else {
+      super.createTable(ident, schema, partitions, properties)
+    }
   }
 
   override def tableExists(ident: Identifier): Boolean = super.tableExists(ident)
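
For context, a minimal usage sketch (not part of the commit) of the behaviour this patch restores: with HoodieCatalog registered as the Spark session catalog, a CREATE TABLE whose provider is not hudi is now delegated to the built-in catalog instead of being forced through createHoodieTable. The catalog/extension settings below follow the usual Hudi Spark 3 quickstart configuration and the table names are illustrative assumptions, not part of this commit.

    // Sketch only: assumes the hudi-spark3 bundle is on the classpath and that the
    // quickstart settings below are the intended way to register HoodieCatalog.
    import org.apache.spark.sql.SparkSession

    val spark = SparkSession.builder()
      .appName("hoodie-catalog-non-hudi-table")
      .master("local[*]")
      .config("spark.sql.catalog.spark_catalog", "org.apache.spark.sql.hudi.catalog.HoodieCatalog")
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .getOrCreate()

    // With the new sparkAdapter.isHoodieTable(properties) check, a plain parquet
    // table is handed to super.createTable, i.e. the underlying session catalog.
    spark.sql(
      """
        |create table parquet_tbl (
        |  id int,
        |  name string,
        |  price double,
        |  ts long
        |) using parquet
      """.stripMargin)

    // A Hudi table created through the same catalog still takes the
    // createHoodieTable path.
    spark.sql(
      """
        |create table hudi_tbl (
        |  id int,
        |  name string,
        |  price double,
        |  ts long
        |) using hudi
        |tblproperties (primaryKey = 'id', preCombineField = 'ts')
      """.stripMargin)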