You are viewing a plain text version of this content. The canonical link for it is here.
Posted to issues@spark.apache.org by "Dongjoon Hyun (Jira)" <ji...@apache.org> on 2020/03/11 19:37:00 UTC

[jira] [Closed] (SPARK-22523) Janino throws StackOverflowError on nested structs with many fields

     [ https://issues.apache.org/jira/browse/SPARK-22523?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Dongjoon Hyun closed SPARK-22523.
---------------------------------

> Janino throws StackOverflowError on nested structs with many fields
> -------------------------------------------------------------------
>
>                 Key: SPARK-22523
>                 URL: https://issues.apache.org/jira/browse/SPARK-22523
>             Project: Spark
>          Issue Type: Sub-task
>          Components: Spark Core, SQL
>    Affects Versions: 2.2.0
>         Environment: * Linux
> * Scala: 2.11.8
> * Spark: 2.2.0
>            Reporter: Utku Demir
>            Priority: Minor
>
> When running the below application, Janino throws a StackOverflowError:
> {code}
> Exception in thread "main" java.lang.StackOverflowError
> 	at org.codehaus.janino.CodeContext.flowAnalysis(CodeContext.java:370)
> 	at org.codehaus.janino.CodeContext.flowAnalysis(CodeContext.java:541)
> 	at org.codehaus.janino.CodeContext.flowAnalysis(CodeContext.java:541)
> 	at org.codehaus.janino.CodeContext.flowAnalysis(CodeContext.java:541)
> 	at org.codehaus.janino.CodeContext.flowAnalysis(CodeContext.java:541)
> {code}
> Problematic code:
> {code:title=Example.scala|borderStyle=solid}
> import org.apache.spark.sql._
> case class Foo(
>   f1: Int = 0,
>   f2: Int = 0,
>   f3: Int = 0,
>   f4: Int = 0,
>   f5: Int = 0,
>   f6: Int = 0,
>   f7: Int = 0,
>   f8: Int = 0,
>   f9: Int = 0,
>   f10: Int = 0,
>   f11: Int = 0,
>   f12: Int = 0,
>   f13: Int = 0,
>   f14: Int = 0,
>   f15: Int = 0,
>   f16: Int = 0,
>   f17: Int = 0,
>   f18: Int = 0,
>   f19: Int = 0,
>   f20: Int = 0,
>   f21: Int = 0,
>   f22: Int = 0,
>   f23: Int = 0,
>   f24: Int = 0
> )
> case class Nest[T](
>   a: T,
>   b: T
> )
> object Nest {
>   def apply[T](t: T): Nest[T] = new Nest(t, t)
> }
> object Main {
>   def main(args: Array[String]) {
>     val spark: SparkSession = SparkSession.builder().appName("test").master("local[*]").getOrCreate()
>     import spark.implicits._
>     val foo = Foo(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
>     Seq.fill(10)(Nest(Nest(foo))).toDS.groupByKey(identity).count.map(s => s).collect
>   }
> }
> {code}



--
This message was sent by Atlassian Jira
(v8.3.4#803005)

---------------------------------------------------------------------
To unsubscribe, e-mail: issues-unsubscribe@spark.apache.org
For additional commands, e-mail: issues-help@spark.apache.org