You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by rx...@apache.org on 2015/07/11 08:25:56 UTC
spark git commit: [SPARK-8970][SQL] remove unnecessary abstraction for ExtractValue
Repository: spark
Updated Branches:
refs/heads/master 0c5207c66 -> c472eb17a
[SPARK-8970][SQL] remove unnecessary abstraction for ExtractValue
Author: Wenchen Fan <cl...@outlook.com>
Closes #7339 from cloud-fan/minor and squashes the following commits:
84a2128 [Wenchen Fan] remove unapply
6a37c12 [Wenchen Fan] remove unnecessary abstraction for ExtractValue
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/c472eb17
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/c472eb17
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/c472eb17
Branch: refs/heads/master
Commit: c472eb17ae7f0910f304e414ea5ccbb77a9e153a
Parents: 0c5207c
Author: Wenchen Fan <cl...@outlook.com>
Authored: Fri Jul 10 23:25:11 2015 -0700
Committer: Reynold Xin <rx...@databricks.com>
Committed: Fri Jul 10 23:25:11 2015 -0700
----------------------------------------------------------------------
.../spark/sql/catalyst/analysis/Analyzer.scala | 3 +-
.../expressions/complexTypeExtractors.scala | 36 ++++----------------
.../sql/catalyst/optimizer/Optimizer.scala | 8 +++--
3 files changed, 15 insertions(+), 32 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/c472eb17/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index 3fdc6d6..891408e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -141,7 +141,8 @@ class Analyzer(
child match {
case _: UnresolvedAttribute => u
case ne: NamedExpression => ne
- case ev: ExtractValueWithStruct => Alias(ev, ev.field.name)()
+ case g: GetStructField => Alias(g, g.field.name)()
+ case g: GetArrayStructFields => Alias(g, g.field.name)()
case g: Generator if g.resolved && g.elementTypes.size > 1 => MultiAlias(g, Nil)
case e if !e.resolved => u
case other => Alias(other, s"_c$i")()
http://git-wip-us.apache.org/repos/asf/spark/blob/c472eb17/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeExtractors.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeExtractors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeExtractors.scala
index 73cc930..5504781 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeExtractors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeExtractors.scala
@@ -78,12 +78,6 @@ object ExtractValue {
}
}
- def unapply(g: ExtractValue): Option[(Expression, Expression)] = g match {
- case o: GetArrayItem => Some((o.child, o.ordinal))
- case o: GetMapValue => Some((o.child, o.key))
- case s: ExtractValueWithStruct => Some((s.child, null))
- }
-
/**
* Find the ordinal of StructField, report error if no desired field or over one
* desired fields are found.
@@ -104,31 +98,16 @@ object ExtractValue {
}
/**
- * A common interface of all kinds of extract value expressions.
- * Note: concrete extract value expressions are created only by `ExtractValue.apply`,
- * we don't need to do type check for them.
- */
-trait ExtractValue {
- self: Expression =>
-}
-
-abstract class ExtractValueWithStruct extends UnaryExpression with ExtractValue {
- self: Product =>
-
- def field: StructField
- override def toString: String = s"$child.${field.name}"
-}
-
-/**
* Returns the value of fields in the Struct `child`.
*
* No need to do type checking since it is handled by [[ExtractValue]].
*/
case class GetStructField(child: Expression, field: StructField, ordinal: Int)
- extends ExtractValueWithStruct {
+ extends UnaryExpression {
override def dataType: DataType = field.dataType
override def nullable: Boolean = child.nullable || field.nullable
+ override def toString: String = s"$child.${field.name}"
protected override def nullSafeEval(input: Any): Any =
input.asInstanceOf[InternalRow](ordinal)
@@ -155,10 +134,11 @@ case class GetArrayStructFields(
child: Expression,
field: StructField,
ordinal: Int,
- containsNull: Boolean) extends ExtractValueWithStruct {
+ containsNull: Boolean) extends UnaryExpression {
override def dataType: DataType = ArrayType(field.dataType, containsNull)
override def nullable: Boolean = child.nullable || containsNull || field.nullable
+ override def toString: String = s"$child.${field.name}"
protected override def nullSafeEval(input: Any): Any = {
input.asInstanceOf[Seq[InternalRow]].map { row =>
@@ -191,8 +171,7 @@ case class GetArrayStructFields(
*
* No need to do type checking since it is handled by [[ExtractValue]].
*/
-case class GetArrayItem(child: Expression, ordinal: Expression)
- extends BinaryExpression with ExtractValue {
+case class GetArrayItem(child: Expression, ordinal: Expression) extends BinaryExpression {
override def toString: String = s"$child[$ordinal]"
@@ -231,12 +210,11 @@ case class GetArrayItem(child: Expression, ordinal: Expression)
}
/**
- * Returns the value of key `ordinal` in Map `child`.
+ * Returns the value of key `key` in Map `child`.
*
* No need to do type checking since it is handled by [[ExtractValue]].
*/
-case class GetMapValue(child: Expression, key: Expression)
- extends BinaryExpression with ExtractValue {
+case class GetMapValue(child: Expression, key: Expression) extends BinaryExpression {
override def toString: String = s"$child[$key]"
http://git-wip-us.apache.org/repos/asf/spark/blob/c472eb17/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
index 7d41ef9..5d80214 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
@@ -275,8 +275,12 @@ object NullPropagation extends Rule[LogicalPlan] {
case e @ Count(Literal(null, _)) => Cast(Literal(0L), e.dataType)
case e @ IsNull(c) if !c.nullable => Literal.create(false, BooleanType)
case e @ IsNotNull(c) if !c.nullable => Literal.create(true, BooleanType)
- case e @ ExtractValue(Literal(null, _), _) => Literal.create(null, e.dataType)
- case e @ ExtractValue(_, Literal(null, _)) => Literal.create(null, e.dataType)
+ case e @ GetArrayItem(Literal(null, _), _) => Literal.create(null, e.dataType)
+ case e @ GetArrayItem(_, Literal(null, _)) => Literal.create(null, e.dataType)
+ case e @ GetMapValue(Literal(null, _), _) => Literal.create(null, e.dataType)
+ case e @ GetMapValue(_, Literal(null, _)) => Literal.create(null, e.dataType)
+ case e @ GetStructField(Literal(null, _), _, _) => Literal.create(null, e.dataType)
+ case e @ GetArrayStructFields(Literal(null, _), _, _, _) => Literal.create(null, e.dataType)
case e @ EqualNullSafe(Literal(null, _), r) => IsNull(r)
case e @ EqualNullSafe(l, Literal(null, _)) => IsNull(l)
case e @ Count(expr) if !expr.nullable => Count(Literal(1))
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org