You are viewing a plain text version of this content. The canonical link for it is here.
Posted to issues@spark.apache.org by "张刘强 (Jira)" <ji...@apache.org> on 2022/08/30 09:53:00 UTC
[jira] [Updated] (SPARK-40274) ArrayIndexOutOfBoundsException in BytecodeReadingParanamer
[ https://issues.apache.org/jira/browse/SPARK-40274?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]
张刘强 updated SPARK-40274:
------------------------
Docs Text: (was: code like this:
val dataFrame: DataFrame = sparkSession.read
.format(JDBC)
.option(SSL, TRUE)
.option(SSL_VERIFICATION, NONE)
.option(DRIVER,
if (DatasourceTaskType.RESOURCE_DIRECTORY.name == inputDataSourceInfo.getSourceType) {
COM_TRINO_JDBC_DRIVER
} else {
JdbcParamsUtil.getDriver(DbType.valueOf(inputDataSourceInfo.getDatabaseType).getCode)
})
.option(URL, if (DatasourceTaskType.RESOURCE_DIRECTORY.name == inputDataSourceInfo.getSourceType) {
inputDataSourceInfo.getAddress
} else {
inputDataSourceInfo.getJdbcUrl
})
.option(USER, inputDataSourceInfo.getUser)
.option(PASSWORD, inputDataSourceInfo.getPassword)
.option("keepAlive", TRUE)
.option(QUERY, baseSql)
.load
columns = dataFrame.columns
val count: Long = dataFrame.count())
> ArrayIndexOutOfBoundsException in BytecodeReadingParanamer
> ----------------------------------------------------------
>
> Key: SPARK-40274
> URL: https://issues.apache.org/jira/browse/SPARK-40274
> Project: Spark
> Issue Type: Bug
> Components: Spark Core, SQL
> Affects Versions: 3.1.2
> Environment: <dependency>
> <groupId>com.fasterxml.jackson.core</groupId>
> <artifactId>jackson-core</artifactId>
> <version>2.10.5</version>
> </dependency>
> <dependency>
> <groupId>com.fasterxml.jackson.core</groupId>
> <artifactId>jackson-databind</artifactId>
> <version>2.10.5</version>
> </dependency>
> <dependency>
> <groupId>com.fasterxml.jackson.core</groupId>
> <artifactId>jackson-annotations</artifactId>
> <version>2.10.5</version>
> </dependency>
> <dependency>
> <groupId>com.fasterxml.jackson.module</groupId>
> <artifactId>jackson-module-scala_2.12</artifactId>
> <version>2.10.5</version>
> </dependency>
> <dependency>
> <groupId>com.thoughtworks.paranamer</groupId>
> <artifactId>paranamer</artifactId>
> <version>2.8</version>
> </dependency>
> <dependency>
> <groupId>org.apache.spark</groupId>
> <artifactId>spark-core_2.12</artifactId>
> <version>3.1.2</version>
> <exclusions>
> <exclusion>
> <groupId>com.fasterxml.jackson.core</groupId>
> <artifactId>jackson-core</artifactId>
> </exclusion>
> <exclusion>
> <groupId>com.fasterxml.jackson.core</groupId>
> <artifactId>jackson-databind</artifactId>
> </exclusion>
> <exclusion>
> <groupId>com.fasterxml.jackson.module</groupId>
> <artifactId>jackson-module-scala_2.12</artifactId>
> </exclusion>
> </exclusions>
> </dependency>
> <dependency>
> <groupId>org.apache.spark</groupId>
> <artifactId>spark-sql_2.12</artifactId>
> <version>3.1.2</version>
> <exclusions>
> <exclusion>
> <groupId>com.fasterxml.jackson.core</groupId>
> <artifactId>jackson-core</artifactId>
> </exclusion>
> <exclusion>
> <groupId>com.fasterxml.jackson.core</groupId>
> <artifactId>jackson-databind</artifactId>
> </exclusion>
> <exclusion>
> <groupId>com.fasterxml.jackson.module</groupId>
> <artifactId>jackson-module-scala_2.12</artifactId>
> </exclusion>
> </exclusions>
> </dependency>
> Reporter: 张刘强
> Priority: Major
>
> spark 3.1.2 scala 2.12.10 jdk 1.8 linux
>
> When dataframe.count is used, the following exception is thrown:
>
> stacktrace like this:
>
> java.lang.ArrayIndexOutOfBoundsException: Index 28499 out of bounds for length 206
> at com.thoughtworks.paranamer.BytecodeReadingParanamer$ClassReader.accept(BytecodeReadingParanamer.java:532)
> at com.thoughtworks.paranamer.BytecodeReadingParanamer$ClassReader.access$200(BytecodeReadingParanamer.java:315)
> at com.thoughtworks.paranamer.BytecodeReadingParanamer.lookupParameterNames(BytecodeReadingParanamer.java:102)
> at com.thoughtworks.paranamer.CachingParanamer.lookupParameterNames(CachingParanamer.java:76)
> at com.fasterxml.jackson.module.scala.introspect.BeanIntrospector$.getCtorParams(BeanIntrospector.scala:45)
> at com.fasterxml.jackson.module.scala.introspect.BeanIntrospector$.$anonfun$apply$1(BeanIntrospector.scala:59)
> at com.fasterxml.jackson.module.scala.introspect.BeanIntrospector$.$anonfun$apply$1$adapted(BeanIntrospector.scala:59)
> at scala.collection.TraversableLike.$anonfun$flatMap$1(TraversableLike.scala:292)
> at scala.collection.Iterator.foreach(Iterator.scala:943)
> at scala.collection.Iterator.foreach$(Iterator.scala:943)
> at scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
> at scala.collection.IterableLike.foreach(IterableLike.scala:74)
> at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
> at scala.collection.AbstractIterable.foreach(Iterable.scala:56)
> at scala.collection.TraversableLike.flatMap(TraversableLike.scala:292)
> at scala.collection.TraversableLike.flatMap$(TraversableLike.scala:289)
> at scala.collection.AbstractTraversable.flatMap(Traversable.scala:108)
> at com.fasterxml.jackson.module.scala.introspect.BeanIntrospector$.findConstructorParam$1(BeanIntrospector.scala:59)
> at com.fasterxml.jackson.module.scala.introspect.BeanIntrospector$.$anonfun$apply$19(BeanIntrospector.scala:181)
> at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:285)
> at scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36)
> at scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33)
> at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:198)
> at scala.collection.TraversableLike.map(TraversableLike.scala:285)
> at scala.collection.TraversableLike.map$(TraversableLike.scala:278)
> at scala.collection.mutable.ArrayOps$ofRef.map(ArrayOps.scala:198)
> at com.fasterxml.jackson.module.scala.introspect.BeanIntrospector$.$anonfun$apply$14(BeanIntrospector.scala:175)
> at com.fasterxml.jackson.module.scala.introspect.BeanIntrospector$.$anonfun$apply$14$adapted(BeanIntrospector.scala:174)
> at scala.collection.immutable.List.flatMap(List.scala:366)
> at com.fasterxml.jackson.module.scala.introspect.BeanIntrospector$.apply(BeanIntrospector.scala:174)
> at com.fasterxml.jackson.module.scala.introspect.ScalaAnnotationIntrospector$._descriptorFor(ScalaAnnotationIntrospectorModule.scala:20)
> at com.fasterxml.jackson.module.scala.introspect.ScalaAnnotationIntrospector$.fieldName(ScalaAnnotationIntrospectorModule.scala:28)
> at com.fasterxml.jackson.module.scala.introspect.ScalaAnnotationIntrospector$.findImplicitPropertyName(ScalaAnnotationIntrospectorModule.scala:80)
> at com.fasterxml.jackson.databind.introspect.AnnotationIntrospectorPair.findImplicitPropertyName(AnnotationIntrospectorPair.java:490)
> at com.fasterxml.jackson.databind.introspect.POJOPropertiesCollector._addFields(POJOPropertiesCollector.java:380)
> at com.fasterxml.jackson.databind.introspect.POJOPropertiesCollector.collectAll(POJOPropertiesCollector.java:308)
> at com.fasterxml.jackson.databind.introspect.POJOPropertiesCollector.getJsonValueAccessor(POJOPropertiesCollector.java:196)
> at com.fasterxml.jackson.databind.introspect.BasicBeanDescription.findJsonValueAccessor(BasicBeanDescription.java:252)
> at com.fasterxml.jackson.databind.ser.BasicSerializerFactory.findSerializerByAnnotations(BasicSerializerFactory.java:346)
> at com.fasterxml.jackson.databind.ser.BeanSerializerFactory._createSerializer2(BeanSerializerFactory.java:216)
--
This message was sent by Atlassian Jira
(v8.20.10#820010)
---------------------------------------------------------------------
To unsubscribe, e-mail: issues-unsubscribe@spark.apache.org
For additional commands, e-mail: issues-help@spark.apache.org