Posted to issues@spark.apache.org by "Michael Armbrust (JIRA)" <ji...@apache.org> on 2014/09/13 08:04:33 UTC

[jira] [Resolved] (SPARK-3455) **HotFix** Unit test failed because attribute references cannot be resolved

     [ https://issues.apache.org/jira/browse/SPARK-3455?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Michael Armbrust resolved SPARK-3455.
-------------------------------------
    Resolution: Fixed

> **HotFix** Unit test failed because attribute references cannot be resolved
> ---------------------------------------------------------------------------
>
>                 Key: SPARK-3455
>                 URL: https://issues.apache.org/jira/browse/SPARK-3455
>             Project: Spark
>          Issue Type: Bug
>          Components: SQL
>            Reporter: Cheng Hao
>            Priority: Blocker
>
> The test case "SPARK-3349 partitioning after limit" failed with the following exception:
> {panel}
> 23:10:04.117 ERROR org.apache.spark.scheduler.TaskSetManager: Task 0 in stage 274.0 failed 1 times; aborting job
> [info] - SPARK-3349 partitioning after limit *** FAILED ***
> [info]   Exception thrown while executing query:
> [info]   == Parsed Logical Plan ==
> [info]   Project [*]
> [info]    Join Inner, Some(('subset1.n = 'lowerCaseData.n))
> [info]     UnresolvedRelation None, lowerCaseData, None
> [info]     UnresolvedRelation None, subset1, None
> [info]   
> [info]   == Analyzed Logical Plan ==
> [info]   Project [n#605,l#606,n#12]
> [info]    Join Inner, Some((n#12 = n#605))
> [info]     SparkLogicalPlan (ExistingRdd [n#605,l#606], MapPartitionsRDD[13] at mapPartitions at basicOperators.scala:219)
> [info]     Limit 2
> [info]      Sort [n#12 DESC]
> [info]       Distinct 
> [info]        Project [n#12]
> [info]         SparkLogicalPlan (ExistingRdd [n#607,l#608], MapPartitionsRDD[13] at mapPartitions at basicOperators.scala:219)
> [info]   
> [info]   == Optimized Logical Plan ==
> [info]   Project [n#605,l#606,n#12]
> [info]    Join Inner, Some((n#12 = n#605))
> [info]     SparkLogicalPlan (ExistingRdd [n#605,l#606], MapPartitionsRDD[13] at mapPartitions at basicOperators.scala:219)
> [info]     Limit 2
> [info]      Sort [n#12 DESC]
> [info]       Distinct 
> [info]        Project [n#12]
> [info]         SparkLogicalPlan (ExistingRdd [n#607,l#608], MapPartitionsRDD[13] at mapPartitions at basicOperators.scala:219)
> [info]   
> [info]   == Physical Plan ==
> [info]   Project [n#605,l#606,n#12]
> [info]    ShuffledHashJoin [n#605], [n#12], BuildRight
> [info]     Exchange (HashPartitioning [n#605], 10)
> [info]      ExistingRdd [n#605,l#606], MapPartitionsRDD[13] at mapPartitions at basicOperators.scala:219
> [info]     Exchange (HashPartitioning [n#12], 10)
> [info]      TakeOrdered 2, [n#12 DESC]
> [info]       Distinct false
> [info]        Exchange (HashPartitioning [n#12], 10)
> [info]         Distinct true
> [info]          Project [n#12]
> [info]           ExistingRdd [n#607,l#608], MapPartitionsRDD[13] at mapPartitions at basicOperators.scala:219
> [info]   
> [info]   Code Generation: false
> [info]   == RDD ==
> [info]   == Exception ==
> [info]   org.apache.spark.sql.catalyst.errors.package$TreeNodeException: execute, tree:
> [info]   Exchange (HashPartitioning [n#12], 10)
> [info]    TakeOrdered 2, [n#12 DESC]
> [info]     Distinct false
> [info]      Exchange (HashPartitioning [n#12], 10)
> [info]       Distinct true
> [info]        Project [n#12]
> [info]         ExistingRdd [n#607,l#608], MapPartitionsRDD[13] at mapPartitions at basicOperators.scala:219
> [info]   
> [info]   org.apache.spark.sql.catalyst.errors.package$TreeNodeException: execute, tree:
> [info]   Exchange (HashPartitioning [n#12], 10)
> [info]    TakeOrdered 2, [n#12 DESC]
> [info]     Distinct false
> [info]      Exchange (HashPartitioning [n#12], 10)
> [info]       Distinct true
> [info]        Project [n#12]
> [info]         ExistingRdd [n#607,l#608], MapPartitionsRDD[13] at mapPartitions at basicOperators.scala:219
> [info]   
> [info]   	at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:47)
> [info]   	at org.apache.spark.sql.execution.Exchange.execute(Exchange.scala:44)
> [info]   	at org.apache.spark.sql.execution.ShuffledHashJoin.execute(joins.scala:354)
> [info]   	at org.apache.spark.sql.execution.Project.execute(basicOperators.scala:42)
> [info]   	at org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:85)
> [info]   	at org.apache.spark.sql.SchemaRDD.collect(SchemaRDD.scala:438)
> [info]   	at org.apache.spark.sql.QueryTest.checkAnswer(QueryTest.scala:40)
> [info]   	at org.apache.spark.sql.SQLQuerySuite$$anonfun$31.apply$mcV$sp(SQLQuerySuite.scala:369)
> [info]   	at org.apache.spark.sql.SQLQuerySuite$$anonfun$31.apply(SQLQuerySuite.scala:362)
> [info]   	at org.apache.spark.sql.SQLQuerySuite$$anonfun$31.apply(SQLQuerySuite.scala:362)
> [info]   	at org.scalatest.Transformer$$anonfun$apply$1.apply(Transformer.scala:22)
> [info]   	at org.scalatest.Transformer$$anonfun$apply$1.apply(Transformer.scala:22)
> [info]   	at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
> [info]   	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
> [info]   	at org.scalatest.Transformer.apply(Transformer.scala:22)
> [info]   	at org.scalatest.Transformer.apply(Transformer.scala:20)
> [info]   	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:158)
> [info]   	at org.scalatest.Suite$class.withFixture(Suite.scala:1121)
> [info]   	at org.scalatest.FunSuite.withFixture(FunSuite.scala:1559)
> [info]   	at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:155)
> [info]   	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:167)
> [info]   	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:167)
> [info]   	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
> [info]   	at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:167)
> [info]   	at org.scalatest.FunSuite.runTest(FunSuite.scala:1559)
> [info]   	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:200)
> [info]   	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:200)
> [info]   	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
> [info]   	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
> [info]   	at scala.collection.immutable.List.foreach(List.scala:318)
> [info]   	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
> [info]   	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
> [info]   	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
> [info]   	at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:200)
> [info]   	at org.scalatest.FunSuite.runTests(FunSuite.scala:1559)
> [info]   	at org.scalatest.Suite$class.run(Suite.scala:1423)
> [info]   	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1559)
> [info]   	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:204)
> [info]   	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:204)
> [info]   	at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
> [info]   	at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:204)
> [info]   	at org.apache.spark.sql.SQLQuerySuite.org$scalatest$BeforeAndAfterAll$$super$run(SQLQuerySuite.scala:29)
> [info]   	at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
> [info]   	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
> [info]   	at org.apache.spark.sql.SQLQuerySuite.run(SQLQuerySuite.scala:29)
> [info]   	at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:444)
> [info]   	at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:651)
> [info]   	at sbt.ForkMain$Run$2.call(ForkMain.java:294)
> [info]   	at sbt.ForkMain$Run$2.call(ForkMain.java:284)
> [info]   	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
> [info]   	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
> [info]   	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
> [info]   	at java.lang.Thread.run(Thread.java:745)
> [info]   Caused by: org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 274.0 failed 1 times, most recent failure: Lost task 0.0 in stage 274.0 (TID 911, localhost): org.apache.spark.sql.catalyst.errors.package$TreeNodeException: Binding attribute, tree: n#12
> [info]           org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:47)
> [info]           org.apache.spark.sql.catalyst.expressions.BindReferences$$anonfun$bindReference$1.applyOrElse(BoundAttribute.scala:47)
> [info]           org.apache.spark.sql.catalyst.expressions.BindReferences$$anonfun$bindReference$1.applyOrElse(BoundAttribute.scala:46)
> [info]           org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:144)
> [info]           org.apache.spark.sql.catalyst.trees.TreeNode.transform(TreeNode.scala:135)
> [info]           org.apache.spark.sql.catalyst.expressions.BindReferences$.bindReference(BoundAttribute.scala:46)
> [info]           org.apache.spark.sql.catalyst.expressions.InterpretedMutableProjection$$anonfun$$init$$2.apply(Projection.scala:52)
> [info]           org.apache.spark.sql.catalyst.expressions.InterpretedMutableProjection$$anonfun$$init$$2.apply(Projection.scala:52)
> [info]           scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
> [info]           scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
> [info]           scala.collection.immutable.List.foreach(List.scala:318)
> [info]           scala.collection.TraversableLike$class.map(TraversableLike.scala:244)
> [info]           scala.collection.AbstractTraversable.map(Traversable.scala:105)
> [info]           org.apache.spark.sql.catalyst.expressions.InterpretedMutableProjection.<init>(Projection.scala:52)
> [info]           org.apache.spark.sql.execution.SparkPlan$$anonfun$newMutableProjection$1.apply(SparkPlan.scala:106)
> [info]           org.apache.spark.sql.execution.SparkPlan$$anonfun$newMutableProjection$1.apply(SparkPlan.scala:106)
> [info]           org.apache.spark.sql.execution.Project$$anonfun$1.apply(basicOperators.scala:43)
> [info]           org.apache.spark.sql.execution.Project$$anonfun$1.apply(basicOperators.scala:42)
> [info]           org.apache.spark.rdd.RDD$$anonfun$13.apply(RDD.scala:596)
> [info]           org.apache.spark.rdd.RDD$$anonfun$13.apply(RDD.scala:596)
> [info]           org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
> [info]           org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:262)
> [info]           org.apache.spark.rdd.RDD.iterator(RDD.scala:229)
> [info]           org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
> [info]           org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:262)
> [info]           org.apache.spark.rdd.RDD.iterator(RDD.scala:229)
> [info]           org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
> [info]           org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:262)
> [info]           org.apache.spark.rdd.RDD.iterator(RDD.scala:229)
> [info]           org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:68)
> [info]           org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:41)
> [info]           org.apache.spark.scheduler.Task.run(Task.scala:54)
> [info]           org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:181)
> [info]           java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
> [info]           java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
> [info]           java.lang.Thread.run(Thread.java:745)
> [info]   Driver stacktrace:
> [info]   	at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1185)
> [info]   	at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1174)
> [info]   	at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1173)
> [info]   	at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
> [info]   	at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
> [info]   	at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1173)
> [info]   	at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:688)
> [info]   	at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:688)
> [info]   	at scala.Option.foreach(Option.scala:236)
> [info]   	at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:688)
> [info]   	at org.apache.spark.scheduler.DAGSchedulerEventProcessActor$$anonfun$receive$2.applyOrElse(DAGScheduler.scala:1391)
> [info]   	at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
> [info]   	at akka.actor.ActorCell.invoke(ActorCell.scala:456)
> [info]   	at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
> [info]   	at akka.dispatch.Mailbox.run(Mailbox.scala:219)
> [info]   	at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
> [info]   	at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
> [info]   	at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
> [info]   	at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
> [info]   	at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) (QueryTest.scala:42)
> {panel}
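>
> Reading the analyzed plan above, the likely cause is visible: the inner Project references n#12, but its ExistingRdd child carries fresh expression IDs (n#607, l#608), presumably re-assigned when the same base relation (MapPartitionsRDD[13]) was resolved a second time for the self-join. At execution time the attribute n#12 then cannot be bound to the child's output, producing the "Binding attribute, tree: n#12" failure in the trace.
> A minimal sketch of the query shape that triggers this, assuming a test SQLContext whose sql helper is in scope and the usual lowerCaseData test table (names taken from the plans above; this is not the suite's verbatim code):
> {code}
> // Register a limited, ordered, distinct subset of a table, then join the
> // original table against it -- both sides resolve to the same base relation.
> val subset1 = sql("SELECT DISTINCT n FROM lowerCaseData ORDER BY n DESC LIMIT 2")
> subset1.registerTempTable("subset1")
> sql("SELECT * FROM lowerCaseData INNER JOIN subset1 ON subset1.n = lowerCaseData.n").collect()
> {code}
> For illustration only, a self-contained toy model (not Catalyst's actual code) of why the binding step throws: attributes are matched by expression ID rather than by name, so a stale ID is simply absent from the child's output:
> {code}
> // Toy model of attribute binding: resolve an attribute to an ordinal in the
> // child's output by exprId. A stale exprId (n#12 vs. n#607) cannot bind.
> case class Attr(name: String, exprId: Int)
>
> def bind(attr: Attr, input: Seq[Attr]): Int =
>   input.indexWhere(_.exprId == attr.exprId) match {
>     case -1 => sys.error(s"Binding attribute, tree: ${attr.name}#${attr.exprId}")
>     case i  => i
>   }
>
> val childOutput = Seq(Attr("n", 607), Attr("l", 608)) // ExistingRdd [n#607,l#608]
> bind(Attr("n", 12), childOutput)                      // throws, as in the trace above
> {code}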



