You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hudi.apache.org by "Ethan Guo (Jira)" <ji...@apache.org> on 2022/02/07 20:42:00 UTC
[jira] [Commented] (HUDI-3334) Unable to merge HoodieMetadataPayload during partition listing
[ https://issues.apache.org/jira/browse/HUDI-3334?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17488425#comment-17488425 ]
Ethan Guo commented on HUDI-3334:
---------------------------------
[~shivnarayan] This is still open. I have not yet investigated the issue.
> Unable to merge HoodieMetadataPayload during partition listing
> --------------------------------------------------------------
>
> Key: HUDI-3334
> URL: https://issues.apache.org/jira/browse/HUDI-3334
> Project: Apache Hudi
> Issue Type: Task
> Reporter: Ethan Guo
> Priority: Blocker
> Fix For: 0.11.0
>
>
> When running the integration test with `mvn -Pintegration-tests verify`, the test failed while retrieving the list of partitions from the metadata table.
> Stacktrace:
> {code:java}
> Caused by: org.apache.hudi.exception.HoodieException: Error fetching partition paths from metadata table
> at org.apache.hudi.common.fs.FSUtils.getAllPartitionPaths(FSUtils.java:299)
> at org.apache.hudi.HoodieTableFileIndexBase.getAllQueryPartitionPaths(HoodieTableFileIndexBase.scala:233)
> at org.apache.hudi.HoodieTableFileIndexBase.loadPartitionPathFiles(HoodieTableFileIndexBase.scala:195)
> at org.apache.hudi.HoodieTableFileIndexBase.refresh0(HoodieTableFileIndexBase.scala:108)
> at org.apache.hudi.HoodieTableFileIndexBase.<init>(HoodieTableFileIndexBase.scala:88)
> at org.apache.hudi.hadoop.HiveHoodieTableFileIndex.<init>(HiveHoodieTableFileIndex.java:52)
> at org.apache.hudi.hadoop.HoodieFileInputFormatBase.listStatusForSnapshotMode(HoodieFileInputFormatBase.java:170)
> at org.apache.hudi.hadoop.HoodieFileInputFormatBase.listStatus(HoodieFileInputFormatBase.java:141)
> at org.apache.hadoop.mapred.FileInputFormat.getSplits(FileInputFormat.java:322)
> at org.apache.hadoop.hive.ql.io.HiveInputFormat.addSplitsForGroup(HiveInputFormat.java:442)
> at org.apache.hadoop.hive.ql.io.HiveInputFormat.getSplits(HiveInputFormat.java:561)
> at org.apache.hadoop.mapreduce.JobSubmitter.writeOldSplits(JobSubmitter.java:330)
> at org.apache.hadoop.mapreduce.JobSubmitter.writeSplits(JobSubmitter.java:322)
> at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:198)
> at org.apache.hadoop.mapreduce.Job$11.run(Job.java:1341)
> at org.apache.hadoop.mapreduce.Job$11.run(Job.java:1338)
> at java.security.AccessController.doPrivileged(Native Method)
> at javax.security.auth.Subject.doAs(Subject.java:422)
> at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1840)
> at org.apache.hadoop.mapreduce.Job.submit(Job.java:1338)
> at org.apache.hadoop.mapred.JobClient$1.run(JobClient.java:575)
> at org.apache.hadoop.mapred.JobClient$1.run(JobClient.java:570)
> at java.security.AccessController.doPrivileged(Native Method)
> at javax.security.auth.Subject.doAs(Subject.java:422)
> at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1840)
> at org.apache.hadoop.mapred.JobClient.submitJobInternal(JobClient.java:570)
> at org.apache.hadoop.mapred.JobClient.submitJob(JobClient.java:561)
> at org.apache.hadoop.hive.ql.exec.mr.ExecDriver.execute(ExecDriver.java:411)
> at org.apache.hadoop.hive.ql.exec.mr.MapRedTask.execute(MapRedTask.java:151)
> at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:199)
> at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:100)
> at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2183)
> at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1839)
> at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1526)
> at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1237)
> at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1232)
> at org.apache.hive.service.cli.operation.SQLOperation.runQuery(SQLOperation.java:255)
> ... 11 more
> Caused by: org.apache.hudi.exception.HoodieMetadataException: Failed to retrieve list of partition from metadata
> at org.apache.hudi.metadata.BaseTableMetadata.getAllPartitionPaths(BaseTableMetadata.java:100)
> at org.apache.hudi.common.fs.FSUtils.getAllPartitionPaths(FSUtils.java:297)
> ... 47 more
> Caused by: org.apache.hudi.exception.HoodieException: Exception when reading log file
> at org.apache.hudi.common.table.log.AbstractHoodieLogRecordReader.scan(AbstractHoodieLogRecordReader.java:333)
> at org.apache.hudi.common.table.log.AbstractHoodieLogRecordReader.scan(AbstractHoodieLogRecordReader.java:179)
> at org.apache.hudi.common.table.log.HoodieMergedLogRecordScanner.performScan(HoodieMergedLogRecordScanner.java:103)
> at org.apache.hudi.metadata.HoodieMetadataMergedLogRecordReader.<init>(HoodieMetadataMergedLogRecordReader.java:71)
> at org.apache.hudi.metadata.HoodieMetadataMergedLogRecordReader.<init>(HoodieMetadataMergedLogRecordReader.java:51)
> at org.apache.hudi.metadata.HoodieMetadataMergedLogRecordReader$Builder.build(HoodieMetadataMergedLogRecordReader.java:246)
> at org.apache.hudi.metadata.HoodieBackedTableMetadata.getLogRecordScanner(HoodieBackedTableMetadata.java:346)
> at org.apache.hudi.metadata.HoodieBackedTableMetadata.lambda$openReadersIfNeeded$2(HoodieBackedTableMetadata.java:262)
> at java.util.concurrent.ConcurrentHashMap.computeIfAbsent(ConcurrentHashMap.java:1660)
> at org.apache.hudi.metadata.HoodieBackedTableMetadata.openReadersIfNeeded(HoodieBackedTableMetadata.java:239)
> at org.apache.hudi.metadata.HoodieBackedTableMetadata.getRecordsByKeys(HoodieBackedTableMetadata.java:129)
> at org.apache.hudi.metadata.HoodieBackedTableMetadata.getRecordByKey(HoodieBackedTableMetadata.java:124)
> at org.apache.hudi.metadata.BaseTableMetadata.fetchAllPartitionPaths(BaseTableMetadata.java:154)
> at org.apache.hudi.metadata.BaseTableMetadata.getAllPartitionPaths(BaseTableMetadata.java:98)
> ... 48 more
> Caused by: java.lang.IllegalArgumentException: Cannot combine 2 with 1
> at org.apache.hudi.common.util.ValidationUtils.checkArgument(ValidationUtils.java:40)
> at org.apache.hudi.metadata.HoodieMetadataPayload.preCombine(HoodieMetadataPayload.java:141)
> at org.apache.hudi.metadata.HoodieMetadataPayload.preCombine(HoodieMetadataPayload.java:63)
> at org.apache.hudi.common.table.log.HoodieMergedLogRecordScanner.processNextRecord(HoodieMergedLogRecordScanner.java:144)
> at org.apache.hudi.metadata.HoodieMetadataMergedLogRecordReader.processNextRecord(HoodieMetadataMergedLogRecordReader.java:78)
> at org.apache.hudi.common.table.log.AbstractHoodieLogRecordReader.processDataBlock(AbstractHoodieLogRecordReader.java:369)
> at org.apache.hudi.common.table.log.AbstractHoodieLogRecordReader.processQueuedBlocksForInstant(AbstractHoodieLogRecordReader.java:431)
> at org.apache.hudi.common.table.log.AbstractHoodieLogRecordReader.scan(AbstractHoodieLogRecordReader.java:239)
> ... 61 more
> at org.apache.hive.jdbc.HiveStatement.waitForOperationToComplete(HiveStatement.java:385)
> at org.apache.hive.jdbc.HiveStatement.execute(HiveStatement.java:254)
> at org.apache.hive.jdbc.HiveStatement.executeQuery(HiveStatement.java:476)
> at org.apache.hudi.cli.utils.HiveUtil.countRecords(HiveUtil.java:58)
> at org.apache.hudi.cli.commands.HoodieSyncCommand.validateSync(HoodieSyncCommand.java:69)
> at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
> at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> at java.lang.reflect.Method.invoke(Method.java:498)
> at org.springframework.util.ReflectionUtils.invokeMethod(ReflectionUtils.java:216)
> ... 5 more{code}
> [https://gist.github.com/yihua/2c7481fbbd509ca7a81e5d0ea7efc2fd]
--
This message was sent by Atlassian Jira
(v8.20.1#820001)