You are viewing a plain text version of this content; the original hyperlink to the canonical archived message was lost in conversion. The commit and diff URLs preserved in the message body below provide the authoritative record.
Posted to commits@spark.apache.org by rx...@apache.org on 2017/03/24 23:02:12 UTC
spark git commit: Disable generate codegen since it fails my workload.
Repository: spark
Updated Branches:
refs/heads/master 91fa80fe8 -> b5c5bd98e
Disable generate codegen since it fails my workload.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/b5c5bd98
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/b5c5bd98
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/b5c5bd98
Branch: refs/heads/master
Commit: b5c5bd98ea5e8dbfebcf86c5459bdf765f5ceb53
Parents: 91fa80f
Author: Reynold Xin <rx...@databricks.com>
Authored: Fri Mar 24 23:57:29 2017 +0100
Committer: Reynold Xin <rx...@databricks.com>
Committed: Fri Mar 24 23:57:29 2017 +0100
----------------------------------------------------------------------
.../spark/sql/execution/GenerateExec.scala | 2 +-
.../sql/execution/WholeStageCodegenSuite.scala | 28 --------------------
2 files changed, 1 insertion(+), 29 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/b5c5bd98/sql/core/src/main/scala/org/apache/spark/sql/execution/GenerateExec.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/GenerateExec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/GenerateExec.scala
index 69be709..f87d058 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/GenerateExec.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/GenerateExec.scala
@@ -119,7 +119,7 @@ case class GenerateExec(
}
}
- override def supportCodegen: Boolean = generator.supportCodegen
+ override def supportCodegen: Boolean = false
override def inputRDDs(): Seq[RDD[InternalRow]] = {
child.asInstanceOf[CodegenSupport].inputRDDs()
http://git-wip-us.apache.org/repos/asf/spark/blob/b5c5bd98/sql/core/src/test/scala/org/apache/spark/sql/execution/WholeStageCodegenSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/WholeStageCodegenSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/WholeStageCodegenSuite.scala
index 4d92035..a4b30a2 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/WholeStageCodegenSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/WholeStageCodegenSuite.scala
@@ -116,34 +116,6 @@ class WholeStageCodegenSuite extends SparkPlanTest with SharedSQLContext {
assert(ds.collect() === Array(("a", 10.0), ("b", 3.0), ("c", 1.0)))
}
- test("generate should be included in WholeStageCodegen") {
- import org.apache.spark.sql.functions._
- val ds = spark.range(2).select(
- col("id"),
- explode(array(col("id") + 1, col("id") + 2)).as("value"))
- val plan = ds.queryExecution.executedPlan
- assert(plan.find(p =>
- p.isInstanceOf[WholeStageCodegenExec] &&
- p.asInstanceOf[WholeStageCodegenExec].child.isInstanceOf[GenerateExec]).isDefined)
- assert(ds.collect() === Array(Row(0, 1), Row(0, 2), Row(1, 2), Row(1, 3)))
- }
-
- test("large stack generator should not use WholeStageCodegen") {
- def createStackGenerator(rows: Int): SparkPlan = {
- val id = UnresolvedAttribute("id")
- val stack = Stack(Literal(rows) +: Seq.tabulate(rows)(i => Add(id, Literal(i))))
- spark.range(500).select(Column(stack)).queryExecution.executedPlan
- }
- val isCodeGenerated: SparkPlan => Boolean = {
- case WholeStageCodegenExec(_: GenerateExec) => true
- case _ => false
- }
-
- // Only 'stack' generators that produce 50 rows or less are code generated.
- assert(createStackGenerator(50).find(isCodeGenerated).isDefined)
- assert(createStackGenerator(100).find(isCodeGenerated).isEmpty)
- }
-
test("SPARK-19512 codegen for comparing structs is incorrect") {
// this would raise CompileException before the fix
spark.range(10)
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org