Posted to commits@hudi.apache.org by fo...@apache.org on 2022/05/16 15:26:33 UTC

[hudi] branch master updated: [HUDI-4103] [HUDI-4001] Filter out properties that should not be used when creating a table for Spark SQL

This is an automated email from the ASF dual-hosted git repository.

forwardxu pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git


The following commit(s) were added to refs/heads/master by this push:
     new a7a42e4490 [HUDI-4103] [HUDI-4001] Filter out properties that should not be used when creating a table for Spark SQL
a7a42e4490 is described below

commit a7a42e4490cc8a96b12b8ebbecb4b90d8e8ecbdb
Author: 董可伦 <do...@inspur.com>
AuthorDate: Mon May 16 23:26:23 2022 +0800

    [HUDI-4103] [HUDI-4001] Filter out properties that should not be used when creating a table for Spark SQL
---
 .../main/scala/org/apache/spark/sql/hudi/catalog/HoodieCatalog.scala | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)
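In short, the patch removes the keys listed in HoodieCatalogTable.needFilterProps
from two places in HoodieCatalog's table-creation path: the write options used to
build the storage format, and the merged properties persisted on the resulting
CatalogTable. A minimal sketch of the idiom follows the diff below.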

diff --git a/hudi-spark-datasource/hudi-spark3/src/main/scala/org/apache/spark/sql/hudi/catalog/HoodieCatalog.scala b/hudi-spark-datasource/hudi-spark3/src/main/scala/org/apache/spark/sql/hudi/catalog/HoodieCatalog.scala
index 5f4572dcc9..2c5261a12f 100644
--- a/hudi-spark-datasource/hudi-spark3/src/main/scala/org/apache/spark/sql/hudi/catalog/HoodieCatalog.scala
+++ b/hudi-spark-datasource/hudi-spark3/src/main/scala/org/apache/spark/sql/hudi/catalog/HoodieCatalog.scala
@@ -26,6 +26,7 @@ import org.apache.hudi.{DataSourceWriteOptions, SparkAdapterSupport}
 import org.apache.spark.sql.HoodieSpark3SqlUtils.convertTransforms
 import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.catalyst.analysis.{NoSuchTableException, TableAlreadyExistsException, UnresolvedAttribute}
+import org.apache.spark.sql.catalyst.catalog.HoodieCatalogTable.needFilterProps
 import org.apache.spark.sql.catalyst.catalog.{CatalogTable, CatalogTableType, CatalogUtils, HoodieCatalogTable}
 import org.apache.spark.sql.connector.catalog.CatalogV2Implicits.IdentifierHelper
 import org.apache.spark.sql.connector.catalog.TableChange.{AddColumn, ColumnChange, UpdateColumnComment, UpdateColumnType}
@@ -215,7 +216,7 @@ class HoodieCatalog extends DelegatingCatalogExtension
     val loc = locUriOpt
       .orElse(existingTableOpt.flatMap(_.storage.locationUri))
       .getOrElse(spark.sessionState.catalog.defaultTablePath(id))
-    val storage = DataSource.buildStorageFormatFromOptions(writeOptions)
+    val storage = DataSource.buildStorageFormatFromOptions(writeOptions.--(needFilterProps))
       .copy(locationUri = Option(loc))
     val tableType =
       if (location.isDefined) CatalogTableType.EXTERNAL else CatalogTableType.MANAGED
@@ -233,7 +234,7 @@ class HoodieCatalog extends DelegatingCatalogExtension
       provider = Option("hudi"),
       partitionColumnNames = newPartitionColumns,
       bucketSpec = newBucketSpec,
-      properties = tablePropertiesNew.asScala.toMap,
+      properties = tablePropertiesNew.asScala.toMap.--(needFilterProps),
       comment = commentOpt)
 
     val hoodieCatalogTable = HoodieCatalogTable(spark, tableDesc)
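
For reference, Scala's Map.-- returns a copy of the map with every key in the
given collection removed, which is how both hunks drop the unwanted properties.
Below is a minimal, self-contained sketch of that idiom; the key names are
hypothetical stand-ins, since the actual needFilterProps list is defined on
HoodieCatalogTable and is not shown in this diff.

// Minimal sketch of the filtering idiom used in the patch above.
// The keys below are hypothetical stand-ins for
// HoodieCatalogTable.needFilterProps, which this diff does not show.
object NeedFilterPropsSketch {
  val needFilterProps: Seq[String] = Seq("type", "primaryKey", "preCombineField")

  def main(args: Array[String]): Unit = {
    val writeOptions: Map[String, String] = Map(
      "path"            -> "/tmp/hudi/t1",
      "primaryKey"      -> "id",  // create-time-only option, filtered out
      "preCombineField" -> "ts"   // create-time-only option, filtered out
    )
    // Map.-- removes every key in the given collection and keeps the rest.
    val storageOptions = writeOptions -- needFilterProps
    println(storageOptions) // prints: Map(path -> /tmp/hudi/t1)
  }
}

The same call appears twice in the patch: once on writeOptions before
DataSource.buildStorageFormatFromOptions, and once on
tablePropertiesNew.asScala.toMap before the CatalogTable is constructed, so the
filtered keys end up neither in the storage format nor in the persisted table
properties.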