Posted to commits@hudi.apache.org by "victor (Jira)" <ji...@apache.org> on 2023/01/25 15:01:00 UTC

[jira] [Updated] (HUDI-5615) Fail to run create table on dataset

     [ https://issues.apache.org/jira/browse/HUDI-5615?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

victor updated HUDI-5615:
-------------------------
    Description: 
How to reproduce:

Environment: EMR 6.9.0 with Hudi 0.12.1
[https://docs.aws.amazon.com/emr/latest/ReleaseGuide/emr-690-release.html]

Run:
{code:java}
create external table databasename.table_name using hudi location 's3://my_bucket/attributes/table_name/'{code}
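
A quick way to confirm what is actually at that location (a hypothetical helper, not part of the reproduction: {{create table ... using hudi}} resolves the table by loading {{<location>/.hoodie/hoodie.properties}}, so this just checks whether that file and its backup exist; run it where the {{s3://}} filesystem is configured, e.g. on the EMR cluster):
{code:java}
// Hypothetical pre-check (assumption, not part of the failing command): lists
// whether the Hudi metadata files the CREATE TABLE path reads actually exist.
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class CheckHoodieProperties {
  public static void main(String[] args) throws Exception {
    // Table base path from the reproduction above
    Path metaDir = new Path("s3://my_bucket/attributes/table_name/.hoodie");
    FileSystem fs = FileSystem.get(URI.create(metaDir.toString()), new Configuration());
    for (String name : new String[] {"hoodie.properties", "hoodie.properties.backup"}) {
      Path p = new Path(metaDir, name);
      // exists() returns false instead of throwing when the object is missing
      System.out.println(p + " exists: " + fs.exists(p));
    }
  }
}
{code}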
 

 

The statement fails with:
{code:java}
23/01/25 04:57:32 WARN HoodieTableConfig: Run `table recover-configs` if config update/delete failed midway. Falling back to backed up configs.
23/01/25 04:57:32 ERROR SparkSQLDriver: Failed in [create external table segp.pax_table_name_test using hudi location 's3://my_bucket/attributes/table_name/']
org.apache.hudi.exception.HoodieIOException: Could not load Hoodie properties from s3://my_bucket/attributes/table_name/.hoodie/hoodie.properties
    at org.apache.hudi.common.table.HoodieTableConfig.<init>(HoodieTableConfig.java:255) ~[hudi-spark3-bundle_2.12-0.12.1-amzn-0.jar:0.12.1-amzn-0]
    at org.apache.hudi.common.table.HoodieTableMetaClient.<init>(HoodieTableMetaClient.java:129) ~[hudi-spark3-bundle_2.12-0.12.1-amzn-0.jar:0.12.1-amzn-0]
    at org.apache.hudi.common.table.HoodieTableMetaClient.newMetaClient(HoodieTableMetaClient.java:642) ~[hudi-spark3-bundle_2.12-0.12.1-amzn-0.jar:0.12.1-amzn-0]
    at org.apache.hudi.common.table.HoodieTableMetaClient.access$000(HoodieTableMetaClient.java:80) ~[hudi-spark3-bundle_2.12-0.12.1-amzn-0.jar:0.12.1-amzn-0]
    at org.apache.hudi.common.table.HoodieTableMetaClient$Builder.build(HoodieTableMetaClient.java:711) ~[hudi-spark3-bundle_2.12-0.12.1-amzn-0.jar:0.12.1-amzn-0]
    at org.apache.spark.sql.catalyst.catalog.HoodieCatalogTable.metaClient$lzycompute(HoodieCatalogTable.scala:81) ~[hudi-spark3-bundle_2.12-0.12.1-amzn-0.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.catalyst.catalog.HoodieCatalogTable.metaClient(HoodieCatalogTable.scala:79) ~[hudi-spark3-bundle_2.12-0.12.1-amzn-0.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.catalyst.catalog.HoodieCatalogTable.tableConfig$lzycompute(HoodieCatalogTable.scala:87) ~[hudi-spark3-bundle_2.12-0.12.1-amzn-0.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.catalyst.catalog.HoodieCatalogTable.tableConfig(HoodieCatalogTable.scala:87) ~[hudi-spark3-bundle_2.12-0.12.1-amzn-0.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.hudi.command.CreateHoodieTableCommand$.validateTblProperties(CreateHoodieTableCommand.scala:96) ~[hudi-spark3-bundle_2.12-0.12.1-amzn-0.jar:0.12.1-amzn-0]
    at org.apache.spark.sql.hudi.command.CreateHoodieTableCommand.run(CreateHoodieTableCommand.scala:66) ~[hudi-spark3-bundle_2.12-0.12.1-amzn-0.jar:0.12.1-amzn-0]
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:75) ~[spark-sql_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:73) ~[spark-sql_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:84) ~[spark-sql_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.$anonfun$applyOrElse$1(QueryExecution.scala:103) ~[spark-sql_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.catalyst.QueryPlanningTracker$.withTracker(QueryPlanningTracker.scala:107) ~[spark-catalyst_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.execution.SQLExecution$.withTracker(SQLExecution.scala:224) ~[spark-sql_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.execution.SQLExecution$.executeQuery$1(SQLExecution.scala:114) ~[spark-sql_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$7(SQLExecution.scala:139) ~[spark-sql_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.catalyst.QueryPlanningTracker$.withTracker(QueryPlanningTracker.scala:107) ~[spark-catalyst_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.execution.SQLExecution$.withTracker(SQLExecution.scala:224) ~[spark-sql_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$6(SQLExecution.scala:139) ~[spark-sql_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:245) ~[spark-sql_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:138) ~[spark-sql_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779) ~[spark-sql_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:68) ~[spark-sql_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:100) ~[spark-sql_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:96) ~[spark-sql_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:615) ~[spark-catalyst_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:177) ~[spark-catalyst_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:615) ~[spark-catalyst_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:30) ~[spark-catalyst_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267) ~[spark-catalyst_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263) ~[spark-catalyst_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30) ~[spark-catalyst_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30) ~[spark-catalyst_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:591) ~[spark-catalyst_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.execution.QueryExecution.eagerlyExecuteCommands(QueryExecution.scala:96) ~[spark-sql_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.execution.QueryExecution.commandExecuted$lzycompute(QueryExecution.scala:83) ~[spark-sql_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:81) ~[spark-sql_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.Dataset.<init>(Dataset.scala:222) ~[spark-sql_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:102) ~[spark-sql_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779) ~[spark-sql_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:99) ~[spark-sql_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:622) ~[spark-sql_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779) ~[spark-sql_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:617) ~[spark-sql_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:651) ~[spark-sql_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.hive.thriftserver.SparkSQLDriver.run(SparkSQLDriver.scala:67) ~[spark-hive-thriftserver_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.processCmd(SparkSQLCLIDriver.scala:384) ~[spark-hive-thriftserver_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.$anonfun$processLine$1(SparkSQLCLIDriver.scala:504) ~[spark-hive-thriftserver_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.$anonfun$processLine$1$adapted(SparkSQLCLIDriver.scala:498) ~[spark-hive-thriftserver_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at scala.collection.Iterator.foreach(Iterator.scala:943) ~[scala-library-2.12.15.jar:?]
    at scala.collection.Iterator.foreach$(Iterator.scala:943) ~[scala-library-2.12.15.jar:?]
    at scala.collection.AbstractIterator.foreach(Iterator.scala:1431) ~[scala-library-2.12.15.jar:?]
    at scala.collection.IterableLike.foreach(IterableLike.scala:74) ~[scala-library-2.12.15.jar:?]
    at scala.collection.IterableLike.foreach$(IterableLike.scala:73) ~[scala-library-2.12.15.jar:?]
    at scala.collection.AbstractIterable.foreach(Iterable.scala:56) ~[scala-library-2.12.15.jar:?]
    at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.processLine(SparkSQLCLIDriver.scala:498) ~[spark-hive-thriftserver_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver$.main(SparkSQLCLIDriver.scala:286) ~[spark-hive-thriftserver_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.main(SparkSQLCLIDriver.scala) ~[spark-hive-thriftserver_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_352]
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_352]
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_352]
    at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_352]
    at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52) ~[spark-core_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:1006) ~[spark-core_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:180) ~[spark-core_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:203) ~[spark-core_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:90) ~[spark-core_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1095) ~[spark-core_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1104) ~[spark-core_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) ~[spark-core_2.12-3.3.0-amzn-1.jar:3.3.0-amzn-1]
Caused by: java.io.FileNotFoundException: No such file or directory 's3://my_bucket/attributes/table_name/.hoodie/hoodie.properties.backup'
    at com.amazon.ws.emr.hadoop.fs.s3n.S3NativeFileSystem.getFileStatus(S3NativeFileSystem.java:524) ~[emrfs-hadoop-assembly-2.54.0.jar:?]
    at com.amazon.ws.emr.hadoop.fs.s3n.S3NativeFileSystem.open(S3NativeFileSystem.java:935) ~[emrfs-hadoop-assembly-2.54.0.jar:?]
    at com.amazon.ws.emr.hadoop.fs.s3n.S3NativeFileSystem.open(S3NativeFileSystem.java:927) ~[emrfs-hadoop-assembly-2.54.0.jar:?]
    at org.apache.hadoop.fs.FileSystem.open(FileSystem.java:983) ~[hadoop-client-api-3.3.3-amzn-0.jar:?]
    at com.amazon.ws.emr.hadoop.fs.EmrFileSystem.open(EmrFileSystem.java:197) ~[emrfs-hadoop-assembly-2.54.0.jar:?]
    at org.apache.hudi.common.fs.HoodieWrapperFileSystem.open(HoodieWrapperFileSystem.java:468) ~[hudi-spark3-bundle_2.12-0.12.1-amzn-0.jar:0.12.1-amzn-0]
    at org.apache.hudi.common.table.HoodieTableConfig.fetchConfigs(HoodieTableConfig.java:309) ~[hudi-spark3-bundle_2.12-0.12.1-amzn-0.jar:0.12.1-amzn-0]
    at org.apache.hudi.common.table.HoodieTableConfig.<init>(HoodieTableConfig.java:245) ~[hudi-spark3-bundle_2.12-0.12.1-amzn-0.jar:0.12.1-amzn-0]
    ... 72 more
org.apache.hudi.exception.HoodieIOException: Could not load Hoodie properties from s3://my_bucket/attributes/table_name/.hoodie/hoodie.properties
    at org.apache.hudi.common.table.HoodieTableConfig.<init>(HoodieTableConfig.java:255)
    at org.apache.hudi.common.table.HoodieTableMetaClient.<init>(HoodieTableMetaClient.java:129)
    at org.apache.hudi.common.table.HoodieTableMetaClient.newMetaClient(HoodieTableMetaClient.java:642)
    at org.apache.hudi.common.table.HoodieTableMetaClient.access$000(HoodieTableMetaClient.java:80)
    at org.apache.hudi.common.table.HoodieTableMetaClient$Builder.build(HoodieTableMetaClient.java:711)
    at org.apache.spark.sql.catalyst.catalog.HoodieCatalogTable.metaClient$lzycompute(HoodieCatalogTable.scala:81)
    at org.apache.spark.sql.catalyst.catalog.HoodieCatalogTable.metaClient(HoodieCatalogTable.scala:79)
    at org.apache.spark.sql.catalyst.catalog.HoodieCatalogTable.tableConfig$lzycompute(HoodieCatalogTable.scala:87)
    at org.apache.spark.sql.catalyst.catalog.HoodieCatalogTable.tableConfig(HoodieCatalogTable.scala:87)
    at org.apache.spark.sql.hudi.command.CreateHoodieTableCommand$.validateTblProperties(CreateHoodieTableCommand.scala:96)
    at org.apache.spark.sql.hudi.command.CreateHoodieTableCommand.run(CreateHoodieTableCommand.scala:66)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:75)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:73)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:84)
    at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.$anonfun$applyOrElse$1(QueryExecution.scala:103)
    at org.apache.spark.sql.catalyst.QueryPlanningTracker$.withTracker(QueryPlanningTracker.scala:107)
    at org.apache.spark.sql.execution.SQLExecution$.withTracker(SQLExecution.scala:224)
    at org.apache.spark.sql.execution.SQLExecution$.executeQuery$1(SQLExecution.scala:114)
    at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$7(SQLExecution.scala:139)
    at org.apache.spark.sql.catalyst.QueryPlanningTracker$.withTracker(QueryPlanningTracker.scala:107)
    at org.apache.spark.sql.execution.SQLExecution$.withTracker(SQLExecution.scala:224)
    at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$6(SQLExecution.scala:139)
    at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:245)
    at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:138)
    at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)
    at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:68)
    at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:100)
    at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:96)
    at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:615)
    at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:177)
    at org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:615)
    at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:30)
    at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)
    at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)
    at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
    at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
    at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:591)
    at org.apache.spark.sql.execution.QueryExecution.eagerlyExecuteCommands(QueryExecution.scala:96)
    at org.apache.spark.sql.execution.QueryExecution.commandExecuted$lzycompute(QueryExecution.scala:83)
    at org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:81)
    at org.apache.spark.sql.Dataset.<init>(Dataset.scala:222)
    at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:102)
    at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)
    at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:99)
    at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:622)
    at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)
    at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:617)
    at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:651)
    at org.apache.spark.sql.hive.thriftserver.SparkSQLDriver.run(SparkSQLDriver.scala:67)
    at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.processCmd(SparkSQLCLIDriver.scala:384)
    at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.$anonfun$processLine$1(SparkSQLCLIDriver.scala:504)
    at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.$anonfun$processLine$1$adapted(SparkSQLCLIDriver.scala:498)
    at scala.collection.Iterator.foreach(Iterator.scala:943)
    at scala.collection.Iterator.foreach$(Iterator.scala:943)
    at scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
    at scala.collection.IterableLike.foreach(IterableLike.scala:74)
    at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
    at scala.collection.AbstractIterable.foreach(Iterable.scala:56)
    at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.processLine(SparkSQLCLIDriver.scala:498)
    at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver$.main(SparkSQLCLIDriver.scala:286)
    at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.main(SparkSQLCLIDriver.scala)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
    at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:1006)
    at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:180)
    at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:203)
    at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:90)
    at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1095)
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1104)
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: java.io.FileNotFoundException: No such file or directory 's3://my_bucket/attributes/table_name/.hoodie/hoodie.properties.backup'
    at com.amazon.ws.emr.hadoop.fs.s3n.S3NativeFileSystem.getFileStatus(S3NativeFileSystem.java:524)
    at com.amazon.ws.emr.hadoop.fs.s3n.S3NativeFileSystem.open(S3NativeFileSystem.java:935)
    at com.amazon.ws.emr.hadoop.fs.s3n.S3NativeFileSystem.open(S3NativeFileSystem.java:927)
    at org.apache.hadoop.fs.FileSystem.open(FileSystem.java:983)
    at com.amazon.ws.emr.hadoop.fs.EmrFileSystem.open(EmrFileSystem.java:197)
    at org.apache.hudi.common.fs.HoodieWrapperFileSystem.open(HoodieWrapperFileSystem.java:468)
    at org.apache.hudi.common.table.HoodieTableConfig.fetchConfigs(HoodieTableConfig.java:309)
    at org.apache.hudi.common.table.HoodieTableConfig.<init>(HoodieTableConfig.java:245)
    ... 72 more {code}
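
Reading the trace: the WARN line shows that HoodieTableConfig could not read {{hoodie.properties}} and fell back to {{hoodie.properties.backup}}, and the Caused by shows the backup does not exist either, so the FileNotFoundException is wrapped as the HoodieIOException that fails the CREATE TABLE. A simplified sketch of that fallback (my paraphrase of what the log implies, not the actual Hudi source):
{code:java}
// Simplified paraphrase of the fallback suggested by the WARN line and the
// stack trace (HoodieTableConfig.fetchConfigs); NOT the real Hudi implementation.
import java.io.IOException;
import java.util.Properties;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

class FetchConfigsSketch {
  static Properties fetchConfigs(FileSystem fs, Path metaDir) throws IOException {
    Properties props = new Properties();
    try (FSDataInputStream in = fs.open(new Path(metaDir, "hoodie.properties"))) {
      props.load(in);
    } catch (IOException e) {
      // "Falling back to backed up configs." -- if hoodie.properties.backup is
      // missing as well, open() throws the FileNotFoundException seen above,
      // which the caller then wraps as HoodieIOException.
      try (FSDataInputStream in = fs.open(new Path(metaDir, "hoodie.properties.backup"))) {
        props.load(in);
      }
    }
    return props;
  }
}
{code}
In other words, the table at this location appears to be left with an unreadable (or mid-update) {{hoodie.properties}} and no backup to recover from, so the statement fails before CreateHoodieTableCommand can even validate the table properties.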


> Fail to run create table on dataset
> -----------------------------------
>
>                 Key: HUDI-5615
>                 URL: https://issues.apache.org/jira/browse/HUDI-5615
>             Project: Apache Hudi
>          Issue Type: Task
>            Reporter: victor
>            Priority: Major
>



--
This message was sent by Atlassian Jira
(v8.20.10#820010)