You are viewing a plain text version of this content. The canonical link for it is here.
Posted to issues@spark.apache.org by "Yuming Wang (Jira)" <ji...@apache.org> on 2021/12/11 03:01:00 UTC

[jira] [Resolved] (SPARK-37565) Upgrade mysql-connector-java to 8.0.27

     [ https://issues.apache.org/jira/browse/SPARK-37565?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Yuming Wang resolved SPARK-37565.
---------------------------------
    Fix Version/s: 3.3.0
       Resolution: Fixed

Issue resolved by pull request 34827
[https://github.com/apache/spark/pull/34827]

> Upgrade mysql-connector-java to 8.0.27
> --------------------------------------
>
>                 Key: SPARK-37565
>                 URL: https://issues.apache.org/jira/browse/SPARK-37565
>             Project: Spark
>          Issue Type: Improvement
>          Components: SQL, Tests
>    Affects Versions: 3.3.0
>            Reporter: Yuming Wang
>            Assignee: Yuming Wang
>            Priority: Major
>             Fix For: 3.3.0
>
>
> The current mysql-connector-java 5.1.38 throws an SSLHandshakeException:
> {noformat}
> Mon Dec 06 22:31:06 GMT-07:00 2021 WARN: Establishing SSL connection without server's identity verification is not recommended. According to MySQL 5.5.45+, 5.6.26+ and 5.7.6+ requirements SSL connection must be established by default if explicit option isn't set. For compliance with existing applications not using SSL the verifyServerCertificate property is set to 'false'. You need either to explicitly disable SSL by setting useSSL=false, or set useSSL=true and provide truststore for server certificate verification.
> com.mysql.jdbc.exceptions.jdbc4.CommunicationsException: Communications link failure
> The last packet successfully received from the server was 27 milliseconds ago.  The last packet sent successfully to the server was 20 milliseconds ago.
> 	at java.base/jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
> 	at java.base/jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
> 	at java.base/jdk.internal.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
> 	at java.base/java.lang.reflect.Constructor.newInstance(Constructor.java:490)
> 	at com.mysql.jdbc.Util.handleNewInstance(Util.java:404)
> 	at com.mysql.jdbc.SQLError.createCommunicationsException(SQLError.java:981)
> 	at com.mysql.jdbc.ExportControlled.transformSocketToSSLSocket(ExportControlled.java:164)
> 	at com.mysql.jdbc.MysqlIO.negotiateSSLConnection(MysqlIO.java:4801)
> 	at com.mysql.jdbc.MysqlIO.proceedHandshakeWithPluggableAuthentication(MysqlIO.java:1643)
> 	at com.mysql.jdbc.MysqlIO.doHandshake(MysqlIO.java:1215)
> 	at com.mysql.jdbc.ConnectionImpl.coreConnect(ConnectionImpl.java:2255)
> 	at com.mysql.jdbc.ConnectionImpl.connectOneTryOnly(ConnectionImpl.java:2286)
> 	at com.mysql.jdbc.ConnectionImpl.createNewIO(ConnectionImpl.java:2085)
> 	at com.mysql.jdbc.ConnectionImpl.<init>(ConnectionImpl.java:795)
> 	at com.mysql.jdbc.JDBC4Connection.<init>(JDBC4Connection.java:44)
> 	at java.base/jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
> 	at java.base/jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
> 	at java.base/jdk.internal.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
> 	at java.base/java.lang.reflect.Constructor.newInstance(Constructor.java:490)
> 	at com.mysql.jdbc.Util.handleNewInstance(Util.java:404)
> 	at com.mysql.jdbc.ConnectionImpl.getInstance(ConnectionImpl.java:400)
> 	at com.mysql.jdbc.NonRegisteringDriver.connect(NonRegisteringDriver.java:327)
> 	at org.apache.spark.sql.execution.datasources.jdbc.JdbcUtils$.$anonfun$createConnectionFactory$1(JdbcUtils.scala:64)
> 	at org.apache.spark.sql.execution.datasources.jdbc.JDBCRDD$.resolveTable(JDBCRDD.scala:56)
> 	at org.apache.spark.sql.execution.datasources.jdbc.JDBCRelation$.getSchema(JDBCRelation.scala:226)
> 	at org.apache.spark.sql.execution.datasources.jdbc.JdbcRelationProvider.createRelation(JdbcRelationProvider.scala:35)
> 	at org.apache.spark.sql.sources.RelationProvider.createRelation(interfaces.scala:87)
> 	at org.apache.spark.sql.sources.RelationProvider.createRelation$(interfaces.scala:83)
> 	at org.apache.spark.sql.execution.datasources.jdbc.JdbcRelationProvider.createRelation(JdbcRelationProvider.scala:24)
> 	at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:341)
> 	at org.apache.spark.sql.execution.command.CreateDataSourceTableCommand.run(createDataSourceTables.scala:86)
> 	at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:71)
> 	at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:69)
> 	at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:80)
> 	at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:231)
> 	at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3641)
> 	at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:105)
> 	at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:165)
> 	at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:90)
> 	at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:776)
> 	at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:67)
> 	at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3639)
> 	at org.apache.spark.sql.Dataset.<init>(Dataset.scala:231)
> 	at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:102)
> 	at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:776)
> 	at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:99)
> 	at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:619)
> 	at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:776)
> 	at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:614)
> 	at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:661)
> 	at org.apache.spark.sql.hive.thriftserver.SparkSQLDriver.run(SparkSQLDriver.scala:63)
> 	at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.processCmd(SparkSQLCLIDriver.scala:377)
> 	at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.$anonfun$processLine$1(SparkSQLCLIDriver.scala:496)
> 	at scala.collection.Iterator.foreach(Iterator.scala:941)
> 	at scala.collection.Iterator.foreach$(Iterator.scala:941)
> 	at scala.collection.AbstractIterator.foreach(Iterator.scala:1429)
> 	at scala.collection.IterableLike.foreach(IterableLike.scala:74)
> 	at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
> 	at scala.collection.AbstractIterable.foreach(Iterable.scala:56)
> 	at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.processLine(SparkSQLCLIDriver.scala:490)
> 	at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver$.main(SparkSQLCLIDriver.scala:282)
> 	at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.main(SparkSQLCLIDriver.scala)
> 	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> 	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
> 	at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> 	at java.base/java.lang.reflect.Method.invoke(Method.java:566)
> 	at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
> 	at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:926)
> 	at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:180)
> 	at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:203)
> 	at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:90)
> 	at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1005)
> 	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1014)
> 	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
> Caused by: javax.net.ssl.SSLHandshakeException: No appropriate protocol (protocol is disabled or cipher suites are inappropriate)
> 	at java.base/sun.security.ssl.HandshakeContext.<init>(HandshakeContext.java:170)
> 	at java.base/sun.security.ssl.ClientHandshakeContext.<init>(ClientHandshakeContext.java:98)
> 	at java.base/sun.security.ssl.TransportContext.kickstart(TransportContext.java:238)
> 	at java.base/sun.security.ssl.SSLSocketImpl.startHandshake(SSLSocketImpl.java:394)
> 	at java.base/sun.security.ssl.SSLSocketImpl.startHandshake(SSLSocketImpl.java:373)
> 	at com.mysql.jdbc.ExportControlled.transformSocketToSSLSocket(ExportControlled.java:149)
> 	... 67 more
> {noformat}



--
This message was sent by Atlassian Jira
(v8.20.1#820001)

---------------------------------------------------------------------
To unsubscribe, e-mail: issues-unsubscribe@spark.apache.org
For additional commands, e-mail: issues-help@spark.apache.org