You are viewing a plain text version of this content. The canonical link for it is here.
Posted to issues@spark.apache.org by "Hyukjin Kwon (JIRA)" <ji...@apache.org> on 2019/05/21 04:23:23 UTC

[jira] [Updated] (SPARK-15872) Dataset of Array of Custom case class throws MissingRequirementError

     [ https://issues.apache.org/jira/browse/SPARK-15872?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Hyukjin Kwon updated SPARK-15872:
---------------------------------
    Labels: bulk-closed  (was: )

> Dataset of Array of Custom case class throws MissingRequirementError
> --------------------------------------------------------------------
>
>                 Key: SPARK-15872
>                 URL: https://issues.apache.org/jira/browse/SPARK-15872
>             Project: Spark
>          Issue Type: Bug
>    Affects Versions: 1.6.1
>            Reporter: Petr Votava
>            Priority: Minor
>              Labels: bulk-closed
>
> example:
> {code:scala}
> import org.apache.spark.SparkContext
> import org.apache.spark.SparkConf
> import org.apache.spark.sql.{SQLContext, Dataset}
> case class Custom(a: String)
> object Main {
>   def main(args: Array[String]) {
>     val conf = new SparkConf()
>         .setAppName("test-spark-bug")
>     val sc = new SparkContext(conf)
>     val sqlContext = new SQLContext(sc)
>     import sqlContext.implicits._
>     val arr: Seq[Custom] = Array(new Custom("a"))
>     val dataset = arr.toDS()
>     dataset.take(1).foreach(println)
>     dataset.map(x => Array(x)).take(1).foreach(println)
>   }
> }
> {code}
> throws:
> Exception in thread "main" scala.reflect.internal.MissingRequirementError: class Custom not found.
> 	at scala.reflect.internal.MissingRequirementError$.signal(MissingRequirementError.scala:16)
> 	at scala.reflect.internal.MissingRequirementError$.notFound(MissingRequirementError.scala:17)
> 	at scala.reflect.internal.Mirrors$RootsBase.ensureClassSymbol(Mirrors.scala:90)
> 	at scala.reflect.internal.Mirrors$RootsBase.staticClass(Mirrors.scala:119)
> 	at scala.reflect.internal.Mirrors$RootsBase.staticClass(Mirrors.scala:21)
> 	at Main$$typecreator2$1.apply(Main.scala:18)
> 	at scala.reflect.api.TypeTags$WeakTypeTagImpl.tpe$lzycompute(TypeTags.scala:231)
> 	at scala.reflect.api.TypeTags$WeakTypeTagImpl.tpe(TypeTags.scala:231)
> 	at org.apache.spark.sql.SQLImplicits$$typecreator19$1.apply(SQLImplicits.scala:126)
> 	at scala.reflect.api.TypeTags$WeakTypeTagImpl.tpe$lzycompute(TypeTags.scala:231)
> 	at scala.reflect.api.TypeTags$WeakTypeTagImpl.tpe(TypeTags.scala:231)
> 	at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder$.apply(ExpressionEncoder.scala:50)
> 	at org.apache.spark.sql.SQLImplicits.newProductArrayEncoder(SQLImplicits.scala:126)
> 	at Main$.main(Main.scala:18)
> 	at Main.main(Main.scala)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> 	at java.lang.reflect.Method.invoke(Method.java:497)
> 	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:731)
> 	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:181)
> 	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:206)
> 	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:121)
> 	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
> expected output (case-class toString, i.e. unquoted field values):
> Custom(a)
> Custom(a)



--
This message was sent by Atlassian JIRA
(v7.6.3#76005)

---------------------------------------------------------------------
To unsubscribe, e-mail: issues-unsubscribe@spark.apache.org
For additional commands, e-mail: issues-help@spark.apache.org