Posted to user@spark.apache.org by Alexandr Dzhagriev <dz...@gmail.com> on 2016/02/01 17:50:22 UTC

Failed to 'collect_set' with dataset in spark 1.6

Hello,

I'm trying to run the following example code:

import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.{SparkContext, SparkConf}
import org.apache.spark.sql.functions._


case class RecordExample(a: Int, b: String)

object ArrayExample {
  def main(args: Array[String]) {
    val conf = new SparkConf()

    val sc = new SparkContext(conf)
    val sqlContext = new HiveContext(sc)

    import sqlContext.implicits._

    val dataset = sc.parallelize(Seq(RecordExample(1, "apple"),
      RecordExample(2, "orange"))).toDS()

    dataset.groupBy($"a").agg(collect_list("b").as[List[String]])

    dataset.collect()

  }

}


and it fails with the following exception (please see the whole stack trace below):

 Exception in thread "main" java.lang.ClassCastException: org.apache.spark.sql.types.ArrayType cannot be cast to org.apache.spark.sql.types.StructType


Could someone please point me to the proper way to do that, or confirm it's a bug?

Thank you, and here is the whole stack trace:

Exception in thread "main" java.lang.ClassCastException: org.apache.spark.sql.types.ArrayType cannot be cast to org.apache.spark.sql.types.StructType
at org.apache.spark.sql.catalyst.expressions.GetStructField.org$apache$spark$sql$catalyst$expressions$GetStructField$$field$lzycompute(complexTypeExtractors.scala:107)
at org.apache.spark.sql.catalyst.expressions.GetStructField.org$apache$spark$sql$catalyst$expressions$GetStructField$$field(complexTypeExtractors.scala:107)
at org.apache.spark.sql.catalyst.expressions.GetStructField.dataType(complexTypeExtractors.scala:109)
at org.apache.spark.sql.catalyst.analysis.ResolveUpCast$$anonfun$apply$24.applyOrElse(Analyzer.scala:1214)
at org.apache.spark.sql.catalyst.analysis.ResolveUpCast$$anonfun$apply$24.applyOrElse(Analyzer.scala:1211)
at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$3.apply(TreeNode.scala:243)
at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$3.apply(TreeNode.scala:243)
at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:53)
at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:242)
at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformDown$1.apply(TreeNode.scala:248)
at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformDown$1.apply(TreeNode.scala:248)
at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:265)
at scala.collection.Iterator$$anon$11.next(Iterator.scala:370)
at scala.collection.Iterator$class.foreach(Iterator.scala:742)
at scala.collection.AbstractIterator.foreach(Iterator.scala:1194)
at scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:59)
at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:104)
at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:48)
at scala.collection.TraversableOnce$class.to(TraversableOnce.scala:308)
at scala.collection.AbstractIterator.to(Iterator.scala:1194)
at scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:300)
at scala.collection.AbstractIterator.toBuffer(Iterator.scala:1194)
at scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:287)
at scala.collection.AbstractIterator.toArray(Iterator.scala:1194)
at org.apache.spark.sql.catalyst.trees.TreeNode.transformChildren(TreeNode.scala:305)
at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:248)
at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformDown$1.apply(TreeNode.scala:248)
at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformDown$1.apply(TreeNode.scala:248)
at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:265)
at scala.collection.Iterator$$anon$11.next(Iterator.scala:370)
at scala.collection.Iterator$class.foreach(Iterator.scala:742)
at scala.collection.AbstractIterator.foreach(Iterator.scala:1194)
at scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:59)
at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:104)
at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:48)
at scala.collection.TraversableOnce$class.to(TraversableOnce.scala:308)
at scala.collection.AbstractIterator.to(Iterator.scala:1194)
at scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:300)
at scala.collection.AbstractIterator.toBuffer(Iterator.scala:1194)
at scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:287)
at scala.collection.AbstractIterator.toArray(Iterator.scala:1194)
at org.apache.spark.sql.catalyst.trees.TreeNode.transformChildren(TreeNode.scala:305)
at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:248)
at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformDown$1.apply(TreeNode.scala:248)
at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformDown$1.apply(TreeNode.scala:248)
at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4$$anonfun$apply$9.apply(TreeNode.scala:294)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:245)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:245)
at scala.collection.immutable.List.foreach(List.scala:381)
at scala.collection.TraversableLike$class.map(TraversableLike.scala:245)
at scala.collection.immutable.List.map(List.scala:285)
at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:292)
at scala.collection.Iterator$$anon$11.next(Iterator.scala:370)
at scala.collection.Iterator$class.foreach(Iterator.scala:742)
at scala.collection.AbstractIterator.foreach(Iterator.scala:1194)
at scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:59)
at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:104)
at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:48)
at scala.collection.TraversableOnce$class.to(TraversableOnce.scala:308)
at scala.collection.AbstractIterator.to(Iterator.scala:1194)
at scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:300)
at scala.collection.AbstractIterator.toBuffer(Iterator.scala:1194)
at scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:287)
at scala.collection.AbstractIterator.toArray(Iterator.scala:1194)
at org.apache.spark.sql.catalyst.trees.TreeNode.transformChildren(TreeNode.scala:305)
at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:248)
at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformDown$1.apply(TreeNode.scala:248)
at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformDown$1.apply(TreeNode.scala:248)
at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4$$anonfun$apply$9.apply(TreeNode.scala:294)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:245)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:245)
at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
at scala.collection.TraversableLike$class.map(TraversableLike.scala:245)
at scala.collection.AbstractTraversable.map(Traversable.scala:104)
at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:292)
at scala.collection.Iterator$$anon$11.next(Iterator.scala:370)
at scala.collection.Iterator$class.foreach(Iterator.scala:742)
at scala.collection.AbstractIterator.foreach(Iterator.scala:1194)
at scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:59)
at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:104)
at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:48)
at scala.collection.TraversableOnce$class.to(TraversableOnce.scala:308)
at scala.collection.AbstractIterator.to(Iterator.scala:1194)
at scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:300)
at scala.collection.AbstractIterator.toBuffer(Iterator.scala:1194)
at scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:287)
at scala.collection.AbstractIterator.toArray(Iterator.scala:1194)
at org.apache.spark.sql.catalyst.trees.TreeNode.transformChildren(TreeNode.scala:305)
at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:248)
at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformDown$1.apply(TreeNode.scala:248)
at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformDown$1.apply(TreeNode.scala:248)
at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:265)
at scala.collection.Iterator$$anon$11.next(Iterator.scala:370)
at scala.collection.Iterator$class.foreach(Iterator.scala:742)
at scala.collection.AbstractIterator.foreach(Iterator.scala:1194)
at scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:59)
at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:104)
at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:48)
at scala.collection.TraversableOnce$class.to(TraversableOnce.scala:308)
at scala.collection.AbstractIterator.to(Iterator.scala:1194)
at scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:300)
at scala.collection.AbstractIterator.toBuffer(Iterator.scala:1194)
at scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:287)
at scala.collection.AbstractIterator.toArray(Iterator.scala:1194)
at org.apache.spark.sql.catalyst.trees.TreeNode.transformChildren(TreeNode.scala:305)
at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:248)
at org.apache.spark.sql.catalyst.plans.QueryPlan.transformExpressionDown$1(QueryPlan.scala:75)
at org.apache.spark.sql.catalyst.plans.QueryPlan.org$apache$spark$sql$catalyst$plans$QueryPlan$$recursiveTransform$1(QueryPlan.scala:85)
at org.apache.spark.sql.catalyst.plans.QueryPlan$$anonfun$org$apache$spark$sql$catalyst$plans$QueryPlan$$recursiveTransform$1$1.apply(QueryPlan.scala:89)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:245)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:245)
at scala.collection.immutable.List.foreach(List.scala:381)
at scala.collection.TraversableLike$class.map(TraversableLike.scala:245)
at scala.collection.immutable.List.map(List.scala:285)
at org.apache.spark.sql.catalyst.plans.QueryPlan.org$apache$spark$sql$catalyst$plans$QueryPlan$$recursiveTransform$1(QueryPlan.scala:89)
at org.apache.spark.sql.catalyst.plans.QueryPlan$$anonfun$1.apply(QueryPlan.scala:93)
at scala.collection.Iterator$$anon$11.next(Iterator.scala:370)
at scala.collection.Iterator$class.foreach(Iterator.scala:742)
at scala.collection.AbstractIterator.foreach(Iterator.scala:1194)
at scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:59)
at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:104)
at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:48)
at scala.collection.TraversableOnce$class.to(TraversableOnce.scala:308)
at scala.collection.AbstractIterator.to(Iterator.scala:1194)
at scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:300)
at scala.collection.AbstractIterator.toBuffer(Iterator.scala:1194)
at scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:287)
at scala.collection.AbstractIterator.toArray(Iterator.scala:1194)
at org.apache.spark.sql.catalyst.plans.QueryPlan.transformExpressionsDown(QueryPlan.scala:93)
at org.apache.spark.sql.catalyst.plans.QueryPlan.transformExpressions(QueryPlan.scala:64)
at org.apache.spark.sql.catalyst.plans.QueryPlan$$anonfun$transformAllExpressions$1.applyOrElse(QueryPlan.scala:134)
at org.apache.spark.sql.catalyst.plans.QueryPlan$$anonfun$transformAllExpressions$1.applyOrElse(QueryPlan.scala:133)
at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$3.apply(TreeNode.scala:243)
at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$3.apply(TreeNode.scala:243)
at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:53)
at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:242)
at org.apache.spark.sql.catalyst.trees.TreeNode.transform(TreeNode.scala:233)
at org.apache.spark.sql.catalyst.plans.QueryPlan.transformAllExpressions(QueryPlan.scala:133)
at org.apache.spark.sql.catalyst.analysis.ResolveUpCast$.apply(Analyzer.scala:1211)
at org.apache.spark.sql.catalyst.analysis.ResolveUpCast$.apply(Analyzer.scala:1195)
at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1$$anonfun$apply$1.apply(RuleExecutor.scala:83)
at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1$$anonfun$apply$1.apply(RuleExecutor.scala:80)
at scala.collection.LinearSeqOptimized$class.foldLeft(LinearSeqOptimized.scala:124)
at scala.collection.immutable.List.foldLeft(List.scala:84)
at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1.apply(RuleExecutor.scala:80)
at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1.apply(RuleExecutor.scala:72)
at scala.collection.immutable.List.foreach(List.scala:381)
at org.apache.spark.sql.catalyst.rules.RuleExecutor.execute(RuleExecutor.scala:72)
at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.resolve(ExpressionEncoder.scala:253)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:78)
at org.apache.spark.sql.GroupedDataset.aggUntyped(GroupedDataset.scala:240)
at org.apache.spark.sql.GroupedDataset.agg(GroupedDataset.scala:253)
at ArrayExample$.main(ArrayExample.scala:22)
at ArrayExample.main(ArrayExample.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:497)
at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:731)
at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:181)
at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:206)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:121)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
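
A possible workaround, not from the thread itself: the ClassCastException is thrown while resolving the typed encoder for the aggregate result (ExpressionEncoder.resolve in the trace above), so one can stay in the untyped DataFrame API and pull the grouped values out of each Row by hand. A minimal sketch, assuming Spark 1.6-era APIs; the "bs" column alias is made up for illustration:

val grouped = dataset.toDF()
  .groupBy("a")
  .agg(collect_list("b").as("bs"))

grouped.collect().foreach { row =>
  val a  = row.getInt(0)          // the grouping key comes first
  val bs = row.getSeq[String](1)  // ArrayType comes back as a Seq, not a List
  println(s"$a -> ${bs.mkString(",")}")
}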

Re: Failed to 'collect_set' with dataset in spark 1.6

Posted by Alexandr Dzhagriev <dz...@gmail.com>.
Good to know, thanks.

On Mon, Feb 1, 2016 at 6:57 PM, Ted Yu <yu...@gmail.com> wrote:

> Got around the previous error by adding:
>
> scala> implicit val kryoEncoder = Encoders.kryo[RecordExample]
> kryoEncoder: org.apache.spark.sql.Encoder[RecordExample] = class[value[0]: binary]
>
> On Mon, Feb 1, 2016 at 9:55 AM, Alexandr Dzhagriev <dz...@gmail.com>
> wrote:
>
>> Hi,
>>
>> That's another thing: the RecordExample case class should be defined at the
>> top level. I ran it with spark-submit.
>>
>> Thanks, Alex.
>>
>> On Mon, Feb 1, 2016 at 6:41 PM, Ted Yu <yu...@gmail.com> wrote:
>>
>>> Running your sample in a spark-shell built from the master branch, I got:
>>>
>>> scala> val dataset = sc.parallelize(Seq(RecordExample(1, "apple"),
>>> RecordExample(2, "orange"))).toDS()
>>> org.apache.spark.sql.AnalysisException: Unable to generate an encoder
>>> for inner class `RecordExample` without access to the scope that this class
>>> was defined in. Try moving this class out of its parent class.;
>>>   at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder$$anonfun$3.applyOrElse(ExpressionEncoder.scala:316)
>>>   at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder$$anonfun$3.applyOrElse(ExpressionEncoder.scala:312)
>>>   at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$3.apply(TreeNode.scala:262)
>>>   at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$3.apply(TreeNode.scala:262)
>>>   at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:70)
>>>   at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:261)
>>>   at org.apache.spark.sql.catalyst.trees.TreeNode.transform(TreeNode.scala:251)
>>>   at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.resolve(ExpressionEncoder.scala:312)
>>>   at org.apache.spark.sql.Dataset.<init>(Dataset.scala:80)
>>>   at org.apache.spark.sql.Dataset.<init>(Dataset.scala:91)
>>>   at org.apache.spark.sql.SQLContext.createDataset(SQLContext.scala:488)
>>>   at org.apache.spark.sql.SQLImplicits.rddToDatasetHolder(SQLImplicits.scala:71)
>>>   ... 53 elided
>>>
>>> On Mon, Feb 1, 2016 at 9:09 AM, Alexandr Dzhagriev <dz...@gmail.com>
>>> wrote:
>>>
>>>> Hello again,
>>>>
>>>> Also I've tried the following snippet with concat_ws:
>>>>
>>>> val dataset = sc.parallelize(Seq(
>>>>   RecordExample(1, "apple"),
>>>>   RecordExample(1, "banana"),
>>>>   RecordExample(2, "orange"))
>>>> ).toDS().groupBy($"a").agg(concat_ws(",", $"b").as[String])
>>>>
>>>> dataset.take(10).foreach(println)
>>>>
>>>>
>>>> which also fails with:
>>>>
>>>> Exception in thread "main" org.apache.spark.sql.AnalysisException:
>>>> expression 'b' is neither present in the group by, nor is it an aggregate
>>>> function. Add to group by or wrap in first() (or first_value) if you don't
>>>> care which value you get.;
>>>> at org.apache.spark.sql.catalyst.analysis.CheckAnalysis$class.failAnalysis(CheckAnalysis.scala:38)
>>>> at org.apache.spark.sql.catalyst.analysis.Analyzer.failAnalysis(Analyzer.scala:44)
>>>> at org.apache.spark.sql.catalyst.analysis.CheckAnalysis$$anonfun$checkAnalysis$1.org$apache$spark$sql$catalyst$analysis$CheckAnalysis$class$$anonfun$$checkValidAggregateExpression$1(CheckAnalysis.scala:130)
>>>>
>>>> Thanks, Alex.
>>>>
>>>> On Mon, Feb 1, 2016 at 6:03 PM, Alexandr Dzhagriev <dz...@gmail.com>
>>>> wrote:
>>>>
>>>>> Hi Ted,
>>>>>
>>>>> That doesn't help either, as one method simply delegates to the other, as
>>>>> far as I can see:
>>>>>
>>>>> def collect_list(columnName: String): Column = collect_list(Column(columnName))
>>>>>
>>>>>
>>>>> Thanks, Alex
>>>>>
>>>>> On Mon, Feb 1, 2016 at 5:55 PM, Ted Yu <yu...@gmail.com> wrote:
>>>>>
>>>>>> bq. agg(collect_list("b")
>>>>>>
>>>>>> Have you tried:
>>>>>>
>>>>>> agg(collect_list($"b")
>>>>>>
>>>>>> On Mon, Feb 1, 2016 at 8:50 AM, Alexandr Dzhagriev <dz...@gmail.com>
>>>>>> wrote:
>>>>>>
>>>>>>> [the original message, code, and full stack trace quoted verbatim; elided, see the top of this thread]
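
For what it's worth, the concat_ws failure quoted above is the standard analysis error for non-aggregate expressions: concat_ws is a plain string function, not an aggregate, so it has to be applied on top of an aggregate such as collect_list rather than instead of one. A hedged sketch, assuming Spark's concat_ws accepts an array<string> column the way Hive's does; the "joined_b" alias is illustrative:

val joined = dataset.toDF()
  .groupBy("a")
  .agg(concat_ws(",", collect_list("b")).as("joined_b"))  // aggregate first, then concatenate

joined.collect().foreach(println)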

Re: Failed to 'collect_set' with dataset in spark 1.6

Posted by Ted Yu <yu...@gmail.com>.
Got around the previous error by adding:

scala> implicit val kryoEncoder = Encoders.kryo[RecordExample]
kryoEncoder: org.apache.spark.sql.Encoder[RecordExample] = class[value[0]: binary]
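
For context, Encoders.kryo[T] side-steps the reflection-based encoder derivation entirely: the whole object is serialized into a single binary column, which is what the class[value[0]: binary] output above shows. A small illustrative sketch of the consequence, assuming 1.6-era APIs:

implicit val kryoEncoder = Encoders.kryo[RecordExample]
val ds = sqlContext.createDataset(Seq(RecordExample(1, "apple")))
println(ds.schema)  // a single binary "value" column; named columns such as $"a" are gone

One implication is that column-based operations like groupBy($"a") have nothing to resolve against after the switch, so the aggregation itself may still need the untyped DataFrame route sketched earlier.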

On Mon, Feb 1, 2016 at 9:55 AM, Alexandr Dzhagriev <dz...@gmail.com> wrote:

> Hi,
>
> That's another thing: the RecordExample case class should be defined at the
> top level. I ran it with spark-submit.
>
> Thanks, Alex.
>
> On Mon, Feb 1, 2016 at 6:41 PM, Ted Yu <yu...@gmail.com> wrote:
>
>> [earlier messages quoted in full; elided, see above]

Re: Failed to 'collect_set' with dataset in spark 1.6

Posted by Alexandr Dzhagriev <dz...@gmail.com>.
Hi,

That's another thing: the RecordExample case class should be defined at the
top level. I ran it with spark-submit.

Thanks, Alex.
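
Illustrative sketch of the point, not from the thread: an inner class keeps a reference to its enclosing instance (and the Scala shell wraps definitions in such enclosing classes), which appears to be exactly the scope the "Unable to generate an encoder for inner class" error says the encoder machinery cannot reach.

class Outer {
  case class Inner(a: Int, b: String)  // inner class: needs an Outer instance's scope,
                                       // so encoder derivation fails
}

case class TopLevel(a: Int, b: String) // top level: encoder derivation works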

On Mon, Feb 1, 2016 at 6:41 PM, Ted Yu <yu...@gmail.com> wrote:

> [quoted earlier messages and stack traces snipped]

Re: Failed to 'collect_set' with dataset in spark 1.6

Posted by Ted Yu <yu...@gmail.com>.
Running your sample in a spark-shell built from the master branch, I got:

scala> val dataset = sc.parallelize(Seq(RecordExample(1, "apple"),
RecordExample(2, "orange"))).toDS()
org.apache.spark.sql.AnalysisException: Unable to generate an encoder for
inner class `RecordExample` without access to the scope that this class was
defined in. Try moving this class out of its parent class.;
  at
org.apache.spark.sql.catalyst.encoders.ExpressionEncoder$$anonfun$3.applyOrElse(ExpressionEncoder.scala:316)
  at
org.apache.spark.sql.catalyst.encoders.ExpressionEncoder$$anonfun$3.applyOrElse(ExpressionEncoder.scala:312)
  at
org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$3.apply(TreeNode.scala:262)
  at
org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$3.apply(TreeNode.scala:262)
  at
org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:70)
  at
org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:261)
  at
org.apache.spark.sql.catalyst.trees.TreeNode.transform(TreeNode.scala:251)
  at
org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.resolve(ExpressionEncoder.scala:312)
  at org.apache.spark.sql.Dataset.<init>(Dataset.scala:80)
  at org.apache.spark.sql.Dataset.<init>(Dataset.scala:91)
  at org.apache.spark.sql.SQLContext.createDataset(SQLContext.scala:488)
  at
org.apache.spark.sql.SQLImplicits.rddToDatasetHolder(SQLImplicits.scala:71)
  ... 53 elided
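
If you do want to experiment in the shell, one workaround is to keep the case
class out of the REPL's wrapper objects. A sketch, assuming a Scala 2.11
spark-shell where :paste -raw compiles the pasted source as a top-level
compilation unit (the package name here is made up):

scala> :paste -raw
// Entering paste mode (ctrl-D to finish)

package example
case class RecordExample(a: Int, b: String)

// Exiting paste mode, now interpreting.

scala> import example.RecordExample

Packaging the case class in a jar on the driver classpath achieves the same
thing.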

On Mon, Feb 1, 2016 at 9:09 AM, Alexandr Dzhagriev <dz...@gmail.com> wrote:

> [quoted earlier messages and stack traces snipped]

Re: Failed to 'collect_set' with dataset in spark 1.6

Posted by Alexandr Dzhagriev <dz...@gmail.com>.
Hello again,

I've also tried the following snippet with concat_ws:

val dataset = sc.parallelize(Seq(
  RecordExample(1, "apple"),
  RecordExample(1, "banana"),
  RecordExample(2, "orange"))
).toDS().groupBy($"a").agg(concat_ws(",", $"b").as[String])

dataset.take(10).foreach(println)


which also fails with:

Exception in thread "main" org.apache.spark.sql.AnalysisException:
expression 'b' is neither present in the group by, nor is it an aggregate
function. Add to group by or wrap in first() (or first_value) if you don't
care which value you get.;
at
org.apache.spark.sql.catalyst.analysis.CheckAnalysis$class.failAnalysis(CheckAnalysis.scala:38)
at
org.apache.spark.sql.catalyst.analysis.Analyzer.failAnalysis(Analyzer.scala:44)
at
org.apache.spark.sql.catalyst.analysis.CheckAnalysis$$anonfun$checkAnalysis$1.org$apache$spark$sql$catalyst$analysis$CheckAnalysis$class$$anonfun$$checkValidAggregateExpression$1(CheckAnalysis.scala:130)

Thanks, Alex.
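
For what it's worth, the analyzer stops complaining once $"b" appears only
inside an aggregate function. A sketch that aggregates first and then joins
the collected values, assuming concat_ws accepts the array column that
collect_list produces (collect_list still needs the HiveContext in 1.6):

val df = sc.parallelize(Seq(
  RecordExample(1, "apple"),
  RecordExample(1, "banana"),
  RecordExample(2, "orange"))
).toDF()

// Aggregate b per key, then flatten the resulting array into one string.
val joined = df.groupBy($"a").agg(concat_ws(",", collect_list($"b")).as("bs"))

joined.show()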

On Mon, Feb 1, 2016 at 6:03 PM, Alexandr Dzhagriev <dz...@gmail.com> wrote:

> [quoted earlier messages and stack traces snipped]

Re: Failed to 'collect_set' with dataset in spark 1.6

Posted by Alexandr Dzhagriev <dz...@gmail.com>.
Hi Ted,

That doesn't help either; as far as I can see, one method simply delegates to
the other:

def collect_list(columnName: String): Column = collect_list(Column(columnName))


Thanks, Alex
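
Until the typed path resolves, a sketch of a workaround that sidesteps the
failing upcast entirely: stay with the untyped DataFrame API and read the
array back from the Row (assuming Spark 1.6, where collect_list yields an
ArrayType column):

import org.apache.spark.sql.Row

val grouped = dataset.toDF().groupBy($"a").agg(collect_list($"b").as("bs"))

grouped.collect().foreach {
  // The ArrayType column comes back as a Seq (a WrappedArray).
  case Row(a: Int, bs: Seq[_]) => println(s"$a -> ${bs.mkString(",")}")
}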

On Mon, Feb 1, 2016 at 5:55 PM, Ted Yu <yu...@gmail.com> wrote:

> bq. agg(collect_list("b")
>
> Have you tried:
>
> agg(collect_list($"b")
>
> On Mon, Feb 1, 2016 at 8:50 AM, Alexandr Dzhagriev <dz...@gmail.com>
> wrote:
>
>> Hello,
>>
>> I'm trying to run the following example code:
>>
>> import org.apache.spark.sql.hive.HiveContext
>> import org.apache.spark.{SparkContext, SparkConf}
>> import org.apache.spark.sql.functions._
>>
>>
>> case class RecordExample(a: Int, b: String)
>>
>> object ArrayExample {
>>   def main(args: Array[String]) {
>>     val conf = new SparkConf()
>>
>>     val sc = new SparkContext(conf)
>>     val sqlContext = new HiveContext(sc)
>>
>>     import sqlContext.implicits._
>>
>>     val dataset = sc.parallelize(Seq(RecordExample(1, "apple"), RecordExample(2, "orange"))).toDS()
>>
>>     dataset.groupBy($"a").agg(collect_list("b").as[List[String]])
>>
>>     dataset.collect()
>>
>>   }
>>
>> }
>>
>>
>> and it fails with the following (please see the whole stack trace below):
>>
>>  Exception in thread "main" java.lang.ClassCastException:
>> org.apache.spark.sql.types.ArrayType cannot be cast to
>> org.apache.spark.sql.types.StructType
>>
>>
>> Could please someone point me to the proper way to do that or confirm
>> it's a bug?
>>
>> Thank you and here is the whole stacktrace:
>>
>> [full stack trace snipped; identical to the trace in the original message]

Re: Failed to 'collect_set' with dataset in spark 1.6

Posted by Ted Yu <yu...@gmail.com>.
bq. agg(collect_list("b")

Have you tried:

agg(collect_list($"b"))
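
Applied to the original snippet, the suggested call would read as below
(a sketch only; as noted in the follow-up above, the String overload
delegates to the Column overload, so the two forms should behave the same):

// Ted's suggestion in context (hypothetical; unlikely to change the outcome):
dataset.groupBy($"a").agg(collect_list($"b").as[List[String]])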

On Mon, Feb 1, 2016 at 8:50 AM, Alexandr Dzhagriev <dz...@gmail.com> wrote:

> Hello,
>
> I'm trying to run the following example code:
>
> import org.apache.spark.sql.hive.HiveContext
> import org.apache.spark.{SparkContext, SparkConf}
> import org.apache.spark.sql.functions._
>
>
> case class RecordExample(a: Int, b: String)
>
> object ArrayExample {
>   def main(args: Array[String]) {
>     val conf = new SparkConf()
>
>     val sc = new SparkContext(conf)
>     val sqlContext = new HiveContext(sc)
>
>     import sqlContext.implicits._
>
>     val dataset = sc.parallelize(Seq(RecordExample(1, "apple"), RecordExample(2, "orange"))).toDS()
>
>     dataset.groupBy($"a").agg(collect_list("b").as[List[String]])
>
>     dataset.collect()
>
>   }
>
> }
>
>
> and it fails with the following (please see the whole stack trace below):
>
>  Exception in thread "main" java.lang.ClassCastException:
> org.apache.spark.sql.types.ArrayType cannot be cast to
> org.apache.spark.sql.types.StructType
>
>
> Could someone please point me to the proper way to do that, or confirm
> that it's a bug?
>
> Thank you, and here is the whole stack trace:
>
> [full stack trace snipped; identical to the trace in the original message]