You are viewing a plain text version of this content. The canonical link for it is here.
Posted to issues@spark.apache.org by "Andrew Ray (JIRA)" <ji...@apache.org> on 2017/03/23 13:50:42 UTC
[jira] [Resolved] (SPARK-19136) Aggregator with case class as
output type fails with ClassCastException
[ https://issues.apache.org/jira/browse/SPARK-19136?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]
Andrew Ray resolved SPARK-19136.
--------------------------------
Resolution: Not A Bug
> Aggregator with case class as output type fails with ClassCastException
> -----------------------------------------------------------------------
>
> Key: SPARK-19136
> URL: https://issues.apache.org/jira/browse/SPARK-19136
> Project: Spark
> Issue Type: Bug
> Components: SQL
> Affects Versions: 2.0.2, 2.1.0
> Reporter: Mathieu D
> Priority: Minor
>
> {{Aggregator}} with a case-class as output type returns a Row that cannot be cast back to this type; the cast fails with {{ClassCastException}}.
> Here is a dummy example to reproduce the problem.
> {code}
> import org.apache.spark.sql._
> import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
> import org.apache.spark.sql.expressions.Aggregator
> import spark.implicits._
> case class MinMax(min: Int, max: Int)
> case class MinMaxAgg() extends Aggregator[Row, (Int, Int), MinMax] with Serializable {
> def zero: (Int, Int) = (Int.MaxValue, Int.MinValue)
> def reduce(b: (Int, Int), a: Row): (Int, Int) = (Math.min(b._1, a.getAs[Int](0)), Math.max(b._2, a.getAs[Int](0)))
> def finish(r: (Int, Int)): MinMax = MinMax(r._1, r._2)
> def merge(b1: (Int, Int), b2: (Int, Int)): (Int, Int) = (Math.min(b1._1, b2._1), Math.max(b1._2, b2._2))
> def bufferEncoder: Encoder[(Int, Int)] = ExpressionEncoder()
> def outputEncoder: Encoder[MinMax] = ExpressionEncoder()
> }
> val ds = Seq(1, 2, 3, 4).toDF("col1")
> val agg = ds.agg(MinMaxAgg().toColumn.alias("minmax"))
> {code}
> bq. {code}
> ds: org.apache.spark.sql.DataFrame = [col1: int]
> agg: org.apache.spark.sql.DataFrame = [minmax: struct<min: int, max: int>]
> {code}
> {code}agg.printSchema(){code}
> bq. {code}
> root
> |-- minmax: struct (nullable = true)
> | |-- min: integer (nullable = false)
> | |-- max: integer (nullable = false)
> {code}
> {code}agg.head(){code}
> bq. {code}
> res1: org.apache.spark.sql.Row = [[1,4]]
> {code}
> {code}agg.head().getAs[MinMax](0){code}
> bq. {code}
> java.lang.ClassCastException: org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema cannot be cast to line4c81e18af34342cda654c381ee91139525.$read$$iw$$iw$$iw$$iw$MinMax
> [...]
> {code}
--
This message was sent by Atlassian JIRA
(v6.3.15#6346)
---------------------------------------------------------------------
To unsubscribe, e-mail: issues-unsubscribe@spark.apache.org
For additional commands, e-mail: issues-help@spark.apache.org