You are viewing a plain text version of this content. The canonical link for it is here.
Posted to reviews@spark.apache.org by gatorsmile <gi...@git.apache.org> on 2018/12/03 23:49:15 UTC
[GitHub] spark pull request #22899: [SPARK-25573] Combine resolveExpression and resol...
Github user gatorsmile commented on a diff in the pull request:
https://github.com/apache/spark/pull/22899#discussion_r238483571
--- Diff: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala ---
@@ -880,21 +880,38 @@ class Analyzer(
}
}
- private def resolve(e: Expression, q: LogicalPlan): Expression = e match {
- case f: LambdaFunction if !f.bound => f
- case u @ UnresolvedAttribute(nameParts) =>
- // Leave unchanged if resolution fails. Hopefully will be resolved next round.
- val result =
- withPosition(u) {
- q.resolveChildren(nameParts, resolver)
- .orElse(resolveLiteralFunction(nameParts, u, q))
- .getOrElse(u)
- }
- logDebug(s"Resolving $u to $result")
- result
- case UnresolvedExtractValue(child, fieldExpr) if child.resolved =>
- ExtractValue(child, fieldExpr, resolver)
- case _ => e.mapChildren(resolve(_, q))
+ /**
+ * Resolves the attribute and extract-value expression(s) by traversing the
+ * input expression in a top-down manner. The traversal is done top-down so that
+ * we can skip over unbound lambda function expressions. The lambda expressions are
+ * resolved in a different rule, [[ResolveLambdaVariables]].
+ *
+ * Example :
+ * SELECT transform(array(1, 2, 3), (x, i) -> x + i)
+ *
+ * In the case above, x and i are resolved as lambda variables in [[ResolveLambdaVariables]].
+ *
+ * Note: In this routine, the unresolved attributes are resolved from the input plan's
+ * children attributes.
+ */
+ private def resolveExpressionTopDown(e: Expression, q: LogicalPlan): Expression = {
+ if (e.resolved) return e
--- End diff --
A good catch!
---
---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org