Posted to commits@hudi.apache.org by le...@apache.org on 2022/06/25 13:03:25 UTC

[hudi] branch master updated: [HUDI-4296] Fix the bug that TestHoodieSparkSqlWriter.testSchemaEvolutionForTableType is flaky (#5973)

This is an automated email from the ASF dual-hosted git repository.

leesf pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git


The following commit(s) were added to refs/heads/master by this push:
     new 142adf4ccb [HUDI-4296] Fix the bug that TestHoodieSparkSqlWriter.testSchemaEvolutionForTableType is flaky (#5973)
142adf4ccb is described below

commit 142adf4ccbf92050dd14d1997c0438565fdfc2cd
Author: xiarixiaoyao <me...@qq.com>
AuthorDate: Sat Jun 25 21:03:19 2022 +0800

    [HUDI-4296] Fix the bug that TestHoodieSparkSqlWriter.testSchemaEvolutionForTableType is flaky (#5973)
---
 .../src/main/scala/org/apache/hudi/BaseFileOnlyRelation.scala      | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/hudi/BaseFileOnlyRelation.scala b/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/hudi/BaseFileOnlyRelation.scala
index 4160c34b0c..d6ec645920 100644
--- a/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/hudi/BaseFileOnlyRelation.scala
+++ b/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/hudi/BaseFileOnlyRelation.scala
@@ -166,7 +166,9 @@ class BaseFileOnlyRelation(sqlContext: SQLContext,
       DataSource.apply(
         sparkSession = sparkSession,
         paths = extraReadPaths,
-        userSpecifiedSchema = userSchema,
+        // Here we should specify the schema as the latest commit schema,
+        // since the table schema may have evolved.
+        userSpecifiedSchema = userSchema.orElse(Some(tableStructSchema)),
         className = formatClassName,
         // Since we're reading the table as just collection of files we have to make sure
         // we only read the latest version of every Hudi's file-group, which might be compacted, clustered, etc.
@@ -175,8 +177,7 @@ class BaseFileOnlyRelation(sqlContext: SQLContext,
         // We rely on [[HoodieROTablePathFilter]], to do proper filtering to assure that
         options = optParams ++ Map(
           "mapreduce.input.pathFilter.class" -> classOf[HoodieROTablePathFilter].getName
-        ),
-        partitionColumns = partitionColumns
+        )
       )
         .resolveRelation()
         .asInstanceOf[HadoopFsRelation]
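
The core of the patch is the orElse fallback on the user-specified schema: when userSchema is empty, the DataSource is now resolved with the table's latest schema (tableStructSchema) rather than leaving the schema unspecified. Below is a minimal standalone sketch of that Option.orElse fallback, assuming spark-sql on the classpath; the schemas, column names, and object name are illustrative, not the actual Hudi internals.

    import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}

    object SchemaFallbackSketch {
      def main(args: Array[String]): Unit = {
        // Illustrative "latest commit" schema, including a column added by schema evolution.
        val tableStructSchema = StructType(Seq(
          StructField("id", IntegerType, nullable = false),
          StructField("name", StringType, nullable = true),
          StructField("added_col", StringType, nullable = true) // added by a later commit
        ))

        // Schema explicitly supplied by the caller, if any.
        val userSchema: Option[StructType] = None

        // Same shape as the patch: prefer the caller's schema,
        // otherwise fall back to the latest table schema.
        val userSpecifiedSchema = userSchema.orElse(Some(tableStructSchema))

        println(userSpecifiedSchema.get.treeString)
      }
    }

With userSchema set to None, the resolved schema is the evolved table schema, which is the behavior the fixed test relies on.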