Posted to issues@spark.apache.org by "Utku Demir (JIRA)" <ji...@apache.org> on 2017/11/15 03:32:00 UTC

[jira] [Updated] (SPARK-22523) Janino throws StackOverflowError on nested structs with many fields

     [ https://issues.apache.org/jira/browse/SPARK-22523?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Utku Demir updated SPARK-22523:
-------------------------------
    Description: 
When running the application below, Janino throws a StackOverflowError:

{code}
Exception in thread "main" java.lang.StackOverflowError
	at org.codehaus.janino.CodeContext.flowAnalysis(CodeContext.java:370)
	at org.codehaus.janino.CodeContext.flowAnalysis(CodeContext.java:541)
	at org.codehaus.janino.CodeContext.flowAnalysis(CodeContext.java:541)
	at org.codehaus.janino.CodeContext.flowAnalysis(CodeContext.java:541)
	at org.codehaus.janino.CodeContext.flowAnalysis(CodeContext.java:541)
{code}

Problematic code:

{code:title=Example.scala|borderStyle=solid}
import org.apache.spark.sql._

case class Foo(
  f1: Int = 0,
  f2: Int = 0,
  f3: Int = 0,
  f4: Int = 0,
  f5: Int = 0,
  f6: Int = 0,
  f7: Int = 0,
  f8: Int = 0,
  f9: Int = 0,
  f10: Int = 0,
  f11: Int = 0,
  f12: Int = 0,
  f13: Int = 0,
  f14: Int = 0,
  f15: Int = 0,
  f16: Int = 0,
  f17: Int = 0,
  f18: Int = 0,
  f19: Int = 0,
  f20: Int = 0,
  f21: Int = 0,
  f22: Int = 0,
  f23: Int = 0,
  f24: Int = 0
)

case class Nest[T](
  a: T,
  b: T
)

object Nest {
  def apply[T](t: T): Nest[T] = new Nest(t, t)
}

object Main {
  def main(args: Array[String]) {
    val spark: SparkSession = SparkSession.builder().appName("test").master("local[*]").getOrCreate()
    import spark.implicits._

    val foo = Foo(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)

    // Grouping on the doubly-nested key forces Spark to generate comparison and
    // encoder code for the full Nest[Nest[Foo]] struct; this is the line that
    // triggers the StackOverflowError during code generation.
    Seq.fill(10)(Nest(Nest(foo))).toDS.groupByKey(identity).count.map(s => s).collect
  }
}
{code}
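
The key type Nest[Nest[Foo]] flattens to 2 * 2 * 24 = 96 integer fields behind two levels of struct nesting, which appears to be what drives Janino's recursive flowAnalysis deep enough to overflow the stack. Until the code generator handles this case, two standard knobs may be worth trying; neither has been verified against this exact reproduction, so the sketch below (reusing the Foo and Nest definitions above) is an assumption rather than a confirmed workaround.

{code:title=PossibleWorkaround.scala|borderStyle=solid}
import org.apache.spark.sql.SparkSession

object MainWithWorkaround {
  def main(args: Array[String]): Unit = {
    // Assumption: disabling whole-stage code generation keeps the generated
    // methods smaller, which may (or may not) avoid the deep recursion seen
    // in Janino's flowAnalysis for this job.
    val spark = SparkSession.builder()
      .appName("test")
      .master("local[*]")
      .config("spark.sql.codegen.wholeStage", "false")
      .getOrCreate()
    import spark.implicits._

    // Same job as in the report above, relying on the Foo and Nest case
    // classes defined there.
    val foo = Foo()
    Seq.fill(10)(Nest(Nest(foo))).toDS.groupByKey(identity).count.map(s => s).collect
  }
}
{code}

If disabling whole-stage codegen is not acceptable, running the unmodified program with a larger driver stack (for example, spark-submit --driver-java-options "-Xss16m") is another common, equally unverified mitigation for deep recursion inside Janino.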



> Janino throws StackOverflowError on nested structs with many fields
> -------------------------------------------------------------------
>
>                 Key: SPARK-22523
>                 URL: https://issues.apache.org/jira/browse/SPARK-22523
>             Project: Spark
>          Issue Type: Bug
>          Components: Spark Core, SQL
>    Affects Versions: 2.2.0
>         Environment: * Linux
> * Scala: 2.11.8
> * Spark: 2.2.0
>            Reporter: Utku Demir
>            Priority: Minor
>
> When running the application below, Janino throws a StackOverflowError:
> {code}
> Exception in thread "main" java.lang.StackOverflowError
> 	at org.codehaus.janino.CodeContext.flowAnalysis(CodeContext.java:370)
> 	at org.codehaus.janino.CodeContext.flowAnalysis(CodeContext.java:541)
> 	at org.codehaus.janino.CodeContext.flowAnalysis(CodeContext.java:541)
> 	at org.codehaus.janino.CodeContext.flowAnalysis(CodeContext.java:541)
> 	at org.codehaus.janino.CodeContext.flowAnalysis(CodeContext.java:541)
> {code}
> Problematic code:
> {code:title=Example.scala|borderStyle=solid}
> import org.apache.spark.sql._
> case class Foo(
>   f1: Int = 0,
>   f2: Int = 0,
>   f3: Int = 0,
>   f4: Int = 0,
>   f5: Int = 0,
>   f6: Int = 0,
>   f7: Int = 0,
>   f8: Int = 0,
>   f9: Int = 0,
>   f10: Int = 0,
>   f11: Int = 0,
>   f12: Int = 0,
>   f13: Int = 0,
>   f14: Int = 0,
>   f15: Int = 0,
>   f16: Int = 0,
>   f17: Int = 0,
>   f18: Int = 0,
>   f19: Int = 0,
>   f20: Int = 0,
>   f21: Int = 0,
>   f22: Int = 0,
>   f23: Int = 0,
>   f24: Int = 0
> )
> case class Nest[T](
>   a: T,
>   b: T
> )
> object Nest {
>   def apply[T](t: T): Nest[T] = new Nest(t, t)
> }
> object Main {
>   def main(args: Array[String]) {
>     val spark: SparkSession = SparkSession.builder().appName("test").master("local[*]").getOrCreate()
>     import spark.implicits._
>     val foo = Foo(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
>     Seq.fill(10)(Nest(Nest(foo))).toDS.groupByKey(identity).count.map(s => s).collect
>   }
> }
> {code}



--
This message was sent by Atlassian JIRA
(v6.4.14#64029)

---------------------------------------------------------------------
To unsubscribe, e-mail: issues-unsubscribe@spark.apache.org
For additional commands, e-mail: issues-help@spark.apache.org