Posted to commits@hudi.apache.org by "Raymond Xu (Jira)" <ji...@apache.org> on 2022/10/19 13:05:00 UTC

[jira] [Updated] (HUDI-4647) Change the default value of HIVE_SYNC_MODE in MergeInto to HMS

     [ https://issues.apache.org/jira/browse/HUDI-4647?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Raymond Xu updated HUDI-4647:
-----------------------------
    Epic Link:   (was: HUDI-2519)

> Change the default value of HIVE_SYNC_MODE in MergeInto to HMS
> --------------------------------------------------------------
>
>                 Key: HUDI-4647
>                 URL: https://issues.apache.org/jira/browse/HUDI-4647
>             Project: Apache Hudi
>          Issue Type: Improvement
>          Components: spark-sql
>            Reporter: 董可伦
>            Assignee: 董可伦
>            Priority: Major
>              Labels: pull-request-available
>             Fix For: 1.0.0
>
>
> The stack trace below was raised by a MERGE INTO statement: with the current default HIVE_SYNC_MODE, HiveSyncTool uses JDBC sync and tries to open a connection to HiveServer2 at the default URL jdbc:hive2://localhost:10000, which fails with Connection refused when no HiveServer2 is running. Defaulting HIVE_SYNC_MODE to HMS in MergeInto would sync through the Hive metastore client instead.
> {code:java}
> org.apache.hudi.exception.HoodieException: Could not sync using the meta sync class org.apache.hudi.hive.HiveSyncTool
>         at org.apache.hudi.sync.common.util.SyncUtilHelpers.runHoodieMetaSync(SyncUtilHelpers.java:58)
>         at org.apache.hudi.HoodieSparkSqlWriter$$anonfun$metaSync$2.apply(HoodieSparkSqlWriter.scala:648)
>         at org.apache.hudi.HoodieSparkSqlWriter$$anonfun$metaSync$2.apply(HoodieSparkSqlWriter.scala:647)
>         at scala.collection.mutable.HashSet.foreach(HashSet.scala:78)
>         at org.apache.hudi.HoodieSparkSqlWriter$.metaSync(HoodieSparkSqlWriter.scala:647)
>         at org.apache.hudi.HoodieSparkSqlWriter$.commitAndPerformPostOperations(HoodieSparkSqlWriter.scala:733)
>         at org.apache.hudi.HoodieSparkSqlWriter$.write(HoodieSparkSqlWriter.scala:336)
>         at org.apache.spark.sql.hudi.command.MergeIntoHoodieTableCommand.executeUpsert(MergeIntoHoodieTableCommand.scala:318)
>         at org.apache.spark.sql.hudi.command.MergeIntoHoodieTableCommand.run(MergeIntoHoodieTableCommand.scala:154)
>         at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70)
>         at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68)
>         at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:79)
>         at org.apache.spark.sql.Dataset$$anonfun$6.apply(Dataset.scala:194)
>         at org.apache.spark.sql.Dataset$$anonfun$6.apply(Dataset.scala:194)
>         at org.apache.spark.sql.Dataset$$anonfun$52.apply(Dataset.scala:3370)
>         at org.apache.spark.sql.execution.SQLExecution$$anonfun$withNewExecutionId$1.apply(SQLExecution.scala:80)
>         at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:127)
>         at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:75)
>         at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3369)
>         at org.apache.spark.sql.Dataset.<init>(Dataset.scala:194)
>         at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:79)
>         at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:642)
>         at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:694)
>         at org.apache.spark.sql.hive.thriftserver.SparkSQLDriver.run(SparkSQLDriver.scala:62)
>         at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.processCmd(SparkSQLCLIDriver.scala:371)
>         at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:376)
>         at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver$.main(SparkSQLCLIDriver.scala:274)
>         at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.main(SparkSQLCLIDriver.scala)
>         at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>         at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>         at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>         at java.lang.reflect.Method.invoke(Method.java:498)
>         at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
>         at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:845)
>         at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:161)
>         at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:184)
>         at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86)
>         at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:920)
>         at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:929)
>         at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
> Caused by: org.apache.hudi.exception.HoodieException: Unable to instantiate class org.apache.hudi.hive.HiveSyncTool
>         at org.apache.hudi.common.util.ReflectionUtils.loadClass(ReflectionUtils.java:91)
>         at org.apache.hudi.sync.common.util.SyncUtilHelpers.instantiateMetaSyncTool(SyncUtilHelpers.java:75)
>         at org.apache.hudi.sync.common.util.SyncUtilHelpers.runHoodieMetaSync(SyncUtilHelpers.java:56)
>         ... 39 more
> Caused by: java.lang.reflect.InvocationTargetException
>         at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
>         at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
>         at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
>         at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
>         at org.apache.hudi.common.util.ReflectionUtils.loadClass(ReflectionUtils.java:89)
>         ... 41 more
> Caused by: org.apache.hudi.hive.HoodieHiveSyncException: Got runtime exception when hive syncing
>         at org.apache.hudi.hive.HiveSyncTool.initSyncClient(HiveSyncTool.java:106)
>         at org.apache.hudi.hive.HiveSyncTool.<init>(HiveSyncTool.java:95)
>         ... 46 more
> Caused by: org.apache.hudi.hive.HoodieHiveSyncException: Failed to create HiveMetaStoreClient
>         at org.apache.hudi.hive.HoodieHiveSyncClient.<init>(HoodieHiveSyncClient.java:95)
>         at org.apache.hudi.hive.HiveSyncTool.initSyncClient(HiveSyncTool.java:101)
>         ... 47 more
> Caused by: org.apache.hudi.hive.HoodieHiveSyncException: Cannot create hive connection jdbc:hive2://localhost:10000/
>         at org.apache.hudi.hive.ddl.JDBCExecutor.createHiveConnection(JDBCExecutor.java:107)
>         at org.apache.hudi.hive.ddl.JDBCExecutor.<init>(JDBCExecutor.java:59)
>         at org.apache.hudi.hive.HoodieHiveSyncClient.<init>(HoodieHiveSyncClient.java:91)
>         ... 48 more
> Caused by: java.sql.SQLException: Could not open client transport with JDBC Uri: jdbc:hive2://localhost:10000: java.net.ConnectException: Connection refused (Connection refused)
>         at org.apache.hive.jdbc.HiveConnection.openTransport(HiveConnection.java:234)
>         at org.apache.hive.jdbc.HiveConnection.<init>(HiveConnection.java:178)
>         at org.apache.hive.jdbc.HiveDriver.connect(HiveDriver.java:105)
>         at java.sql.DriverManager.getConnection(DriverManager.java:664)
>         at java.sql.DriverManager.getConnection(DriverManager.java:247)
>         at org.apache.hudi.hive.ddl.JDBCExecutor.createHiveConnection(JDBCExecutor.java:104)
>         ... 50 more
> Caused by: org.apache.thrift.transport.TTransportException: java.net.ConnectException: Connection refused (Connection refused)
>         at org.apache.thrift.transport.TSocket.open(TSocket.java:226)
>         at org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:266)
>         at org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37)
>         at org.apache.hive.jdbc.HiveConnection.openTransport(HiveConnection.java:206)
>         ... 55 more
> Caused by: java.net.ConnectException: Connection refused (Connection refused)
>         at java.net.PlainSocketImpl.socketConnect(Native Method)
>         at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350)
>         at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206)
>         at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188)
>         at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392)
>         at java.net.Socket.connect(Socket.java:589)
>         at org.apache.thrift.transport.TSocket.open(TSocket.java:221)
>         ... 58 more{code}
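>
> As a hedged illustration (not part of the original report), the sketch below shows one way to route meta sync through HMS for a MERGE INTO from Scala/Spark. It assumes a Spark session with the Hudi SQL extensions, assumes that session-level set of hoodie.* configs is picked up by the MERGE INTO command, and uses placeholder table names and a placeholder metastore URI.
> {code:scala}
> import org.apache.spark.sql.SparkSession
>
> // Minimal sketch: force HMS-based hive sync instead of the default JDBC sync.
> // Table names, database, and the metastore URI below are placeholders.
> val spark = SparkSession.builder()
>   .appName("hudi-merge-into-hms-sync")
>   .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
>   .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
>   .enableHiveSupport()
>   .getOrCreate()
>
> // Sync through the Hive metastore client rather than JDBC/HiveServer2.
> spark.sql("set hoodie.datasource.hive_sync.mode=hms")
> spark.sql("set hoodie.datasource.hive_sync.metastore.uris=thrift://localhost:9083")
>
> // The MERGE INTO that previously triggered HiveSyncTool in JDBC mode.
> spark.sql(
>   """
>     |MERGE INTO target_hudi_tbl t
>     |USING source_tbl s
>     |ON t.id = s.id
>     |WHEN MATCHED THEN UPDATE SET *
>     |WHEN NOT MATCHED THEN INSERT *
>     |""".stripMargin)
> {code}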


